2 # Copyright (c) 2012 The Native Client Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 """Build NativeClient toolchain packages."""
# Done first to setup python module path.
import toolchain_env  # NOTE(review): restored from truncation; sets sys.path -- confirm module name.

import logging
import optparse
import os
import sys
import textwrap

import file_tools
import gsd_storage
import local_storage_cache
import log_tools
import once
24 SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
25 NACL_DIR = os.path.dirname(SCRIPT_DIR)
26 ROOT_DIR = os.path.dirname(NACL_DIR)
28 DEFAULT_CACHE_DIR = os.path.join(SCRIPT_DIR, 'cache')
29 DEFAULT_SRC_DIR = os.path.join(SCRIPT_DIR, 'src')
30 DEFAULT_OUT_DIR = os.path.join(SCRIPT_DIR, 'out')
33 def PrintFlush(message):
34 """Print to stdout and flush.
36 Windows flushes stdout very intermittently (particularly through the
37 buildbot). Forcing an immediate flush so that buildbot annotator section
38 headings appear at the right place in relation to logging output which goes
45 def PrintAnnotatorURL(url):
46 """Print an URL in buildbot annotator form.
51 PrintFlush('@@@STEP_LINK@download@%s@@@' % url)
54 class PackageBuilder(object):
55 """Module to build a setup of packages."""
57 def __init__(self, packages, args):
61 packages: A dictionary with the following format:
66 [<list of command.Command objects to run>],
67 'dependencies': # optional
68 [<list of package depdenencies>],
69 'unpack_commands': # optional
70 [<list of command.Command objects for unpacking inputs
71 before they are hashed>'],
72 'hashed_inputs': # optional
73 [<list of paths to use for build signature>],
76 REPO_SRC_INFO is either:
77 'git_url': '<git repo url>',
78 'git_revision': '<git hex digest to sync>',
80 'tar_src': '<root relative path to source tarball>',
81 args: sys.argv[1:] or equivalent.
83 self._packages = packages
84 self.DecodeArgs(packages, args)
86 self._build_once = once.Once(
87 use_cached_results=self._options.use_cached_results,
88 cache_results=self._options.cache_results,
89 print_url=PrintAnnotatorURL,
90 storage=self.CreateStorage())
93 """Main entry point."""
94 if self._options.clobber:
95 PrintFlush('@@@BUILD_STEP clobber@@@')
96 file_tools.RemoveDirectoryIfPresent(self._options.source)
97 file_tools.RemoveDirectoryIfPresent(self._options.output)
101 def SetupLogging(self):
102 """Setup python logging based on options."""
103 if self._options.verbose:
104 logging.getLogger().setLevel(logging.DEBUG)
106 logging.getLogger().setLevel(logging.INFO)
107 logging.basicConfig(format='%(levelname)s: %(message)s')
109 def SyncGitRepo(self, package):
110 """Sync the git repo for a package.
113 package: Package name to sync.
115 PrintFlush('@@@BUILD_STEP sync %s@@@' % package)
116 package_info = self._packages[package]
117 url = package_info['git_url']
118 revision = package_info['git_revision']
119 destination = os.path.join(self._options.source, package)
120 logging.info('Syncing %s...' % package)
121 if self._options.reclone:
122 file_tools.RemoveDirectoryIfPresent(destination)
123 if sys.platform == 'win32':
124 # On windows, we want to use the depot_tools version of git, which has
125 # git.bat as an entry point. When running through the msys command
126 # prompt, subprocess does not handle batch files. Explicitly invoking
127 # cmd.exe to be sure we run the correct git in this case.
128 git = ['cmd.exe', '/c', 'git.bat']
131 if not os.path.exists(destination):
132 logging.info('Cloning %s...' % package)
133 log_tools.CheckCall(git + ['clone', '-n', url, destination])
134 if self._options.pinned:
135 logging.info('Checking out pinned revision...')
136 log_tools.CheckCall(git + ['fetch', '--all'], cwd=destination)
137 log_tools.CheckCall(git + ['checkout', '-f', revision], cwd=destination)
138 log_tools.CheckCall(git + ['clean', '-dffx'], cwd=destination)
139 logging.info('Done syncing %s.' % package)
141 def BuildPackage(self, package):
142 """Build a single package.
144 Assumes dependencies of the package have been built.
146 package: Package to build.
148 PrintFlush('@@@BUILD_STEP build %s@@@' % package)
149 package_info = self._packages[package]
150 dependencies = package_info.get('dependencies', [])
151 # Collect a dict of all the inputs.
153 # Add in either a tar source or a git source.
154 if 'tar_src' in package_info:
155 inputs['src'] = os.path.join(ROOT_DIR, package_info['tar_src'])
157 inputs['src'] = os.path.join(self._options.source, package)
158 # Add in each dependency by package name.
159 for dependency in dependencies:
160 inputs[dependency] = os.path.join(
161 self._options.output, dependency + '_install')
162 # Each package generates intermediate into output/<PACKAGE>_work.
163 # Clobbered here explicitly.
164 work_dir = os.path.join(self._options.output, package + '_work')
165 file_tools.RemoveDirectoryIfPresent(work_dir)
167 # Each package emits its output to output/<PACKAGE>_install.
168 # Clobbered implicitly by Run().
169 output = os.path.join(self._options.output, package + '_install')
170 # A package may define an alternate set of inputs to be used for
171 # computing the build signature. These are assumed to be in the working
173 hashed_inputs = package_info.get('hashed_inputs')
174 if hashed_inputs is not None:
175 for key, value in hashed_inputs.iteritems():
176 hashed_inputs[key] = os.path.join(work_dir, value)
178 self._build_once.Run(
179 package, inputs, output,
180 commands=package_info.get('commands', []),
181 unpack_commands=package_info.get('unpack_commands', []),
182 hashed_inputs=hashed_inputs,
183 working_dir=work_dir)
185 def BuildOrder(self, targets):
186 """Find what needs to be built in what order to build all targets.
189 targets: A list of target packages to build.
191 A topologically sorted list of the targets plus their transitive
192 dependencies, in an order that will allow things to be built.
196 def Add(target, target_path):
197 if target in order_set:
199 if target not in self._packages:
200 raise Exception('Unknown package %s' % target)
201 next_target_path = target_path + [target]
202 if target in target_path:
203 raise Exception('Dependency cycle: %s' % ' -> '.join(next_target_path))
204 for dependency in self._packages[target].get('dependencies', []):
205 Add(dependency, next_target_path)
207 order_set.add(target)
208 for target in targets:
213 """Sync all packages selected and their dependencies."""
214 file_tools.MakeDirectoryIfAbsent(self._options.source)
215 for target in self._targets:
216 # Only packages using git repos need to be synced.
217 if 'git_url' in self._packages[target]:
218 self.SyncGitRepo(target)
221 """Build all packages selected and their dependencies."""
222 file_tools.MakeDirectoryIfAbsent(self._options.output)
223 for target in self._targets:
224 self.BuildPackage(target)
226 def DecodeArgs(self, packages, args):
227 """Decode command line arguments to this build.
229 Populated self._options and self._targets.
231 packages: A list of package names to build.
232 args: sys.argv[1:] or equivalent.
234 package_list = sorted(packages.keys())
235 parser = optparse.OptionParser(
236 usage='USAGE: %prog [options] [targets...]\n\n'
237 'Available targets:\n' +
238 '\n'.join(textwrap.wrap(' '.join(package_list))))
240 '-v', '--verbose', dest='verbose',
241 default=False, action='store_true',
242 help='Produce more output.')
244 '-c', '--clobber', dest='clobber',
245 default=False, action='store_true',
246 help='Clobber source and output directories.')
248 '--cache', dest='cache',
249 default=DEFAULT_CACHE_DIR,
250 help='Select directory containing local storage cache.')
252 '-s', '--source', dest='source',
253 default=DEFAULT_SRC_DIR,
254 help='Select directory containing source checkouts.')
256 '-o', '--output', dest='output',
257 default=DEFAULT_OUT_DIR,
258 help='Select directory containing build output.')
260 '--no-use-cached-results', dest='use_cached_results',
261 default=True, action='store_false',
262 help='Do not rely on cached results.')
264 '--no-use-remote-cache', dest='use_remote_cache',
265 default=True, action='store_false',
266 help='Do not rely on non-local cached results.')
268 '--no-cache-results', dest='cache_results',
269 default=True, action='store_false',
270 help='Do not cache results.')
272 '--reclone', dest='reclone',
273 default=False, action='store_true',
274 help='Clone source trees from scratch.')
276 '--no-pinned', dest='pinned',
277 default=True, action='store_false',
278 help='Do not use pinned revisions.')
280 '--trybot', dest='trybot',
281 default=False, action='store_true',
282 help='Run and cache as if on trybot.')
284 '--buildbot', dest='buildbot',
285 default=False, action='store_true',
286 help='Run and cache as if on a non-trybot buildbot.')
287 options, targets = parser.parse_args(args)
288 if options.trybot and options.buildbot:
289 PrintFlush('ERROR: Tried to run with both --trybot and --buildbot.')
291 if options.trybot or options.buildbot:
292 options.verbose = True
294 targets = sorted(packages.keys())
295 targets = self.BuildOrder(targets)
296 self._options = options
297 self._targets = targets
299 def CreateStorage(self):
300 """Create a storage object for this build.
303 A storage object (GSDStorage).
305 if self._options.buildbot:
306 return gsd_storage.GSDStorage(
307 write_bucket='nativeclient-once',
308 read_buckets=['nativeclient-once'])
309 elif self._options.trybot:
310 return gsd_storage.GSDStorage(
311 write_bucket='nativeclient-once-try',
312 read_buckets=['nativeclient-once', 'nativeclient-once-try'])
315 if self._options.use_remote_cache:
316 read_buckets += ['nativeclient-once']
317 return local_storage_cache.LocalStorageCache(
318 cache_path=self._options.cache,
319 storage=gsd_storage.GSDStorage(
321 read_buckets=read_buckets))