# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Build NativeClient toolchain packages."""
# Done first to setup python module path.
import logging
import optparse
import os
import sys
import textwrap

import file_tools
import gsd_storage
import local_storage_cache
import log_tools
import once
# Paths anchored at this script's own location; presumably the layout is
# <root>/native_client/toolchain_build -- TODO confirm against checkout.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
NACL_DIR = os.path.dirname(SCRIPT_DIR)  # parent directory of this script
ROOT_DIR = os.path.dirname(NACL_DIR)    # grandparent (checkout root)

# Default directories (siblings of this script) for the local results cache,
# source checkouts, and build output.  Each can be overridden on the command
# line (see DecodeArgs: --cache, --source, --output).
DEFAULT_CACHE_DIR = os.path.join(SCRIPT_DIR, 'cache')
DEFAULT_SRC_DIR = os.path.join(SCRIPT_DIR, 'src')
DEFAULT_OUT_DIR = os.path.join(SCRIPT_DIR, 'out')
def PrintFlush(message):
  """Flush stdout and print a message to stderr.

  Buildbot annotator messages must be at the beginning of a line, and we want to
  ensure that any output from the script or from subprocesses appears in the
  correct order wrt BUILD_STEP messages. So we flush stdout before printing all
  buildbot messages here.

  Args:
    message: Message to print to stderr.
  """
  # The flush promised by the docstring above; without it buffered stdout
  # output could interleave after the annotator message.
  sys.stdout.flush()
  print >>sys.stderr, message
def PrintAnnotatorURL(url):
  """Print an URL in buildbot annotator form.

  Emits a @@@STEP_LINK@@@ annotation labeled 'download' so the buildbot UI
  renders the URL as a clickable link on the current step.

  Args:
    url: URL to print.
  """
  PrintFlush('@@@STEP_LINK@download@%s@@@' % url)
class PackageBuilder(object):
  """Module to build a set of packages."""

  def __init__(self, packages, args):
    """Constructor.

    Args:
      packages: A dictionary with the following format. There are two types of
          packages: source and build (described below).
        {
          '<source package name>': {
            'type': 'source',
              # Source packages are for sources; in particular remote sources
              # where it is not known whether they have changed until they are
              # synced (it can also be used for tarballs which need to be
              # unpacked). Source package commands are run unconditionally
              # unless sync is skipped via the command-line option. Source
              # package contents are not memoized.
            'dependencies':  # optional
              [<list of package dependencies>],
            'output_dirname':  # optional
              '<directory name>',  # Name of the directory to checkout sources
              # into (a subdirectory of the global source directory); defaults
              # to the package name.
            'commands':
              [<list of command.Runnable objects to run>],
            'inputs':  # optional
              {<mapping whose keys are names, and whose values are files or
                directories (e.g. checked-in tarballs) used as input. Since
                source targets are unconditional, this is only useful as a
                convenience for commands, which may refer to the inputs by their
                names.>},
          },
          '<build package name>': {
            'type': 'build',
              # Build packages are memoized, and will build only if their
              # inputs have changed. Their inputs consist of the output of
              # their package dependencies plus any file or directory inputs
              # given by their 'inputs' member.
            'dependencies':  # optional
              [<list of package dependencies>],
            'inputs':  # optional
              {<mapping whose keys are names, and whose values are files or
                directories (e.g. checked-in tarballs) used as input>},
            'output_subdir':  # optional
              '<directory name>',  # Name of a subdir to be created in the
              # output directory, into which all output will be placed. If not
              # present output will go into the root of the output directory.
            'commands':
              [<list of command.Command objects to run>],
              # Objects that have a 'skip_for_incremental' attribute that
              # evaluates to True will not be run on incremental builds unless
              # the working directory is empty.
          },
        }
      args: sys.argv[1:] or equivalent.
    """
    self._packages = packages
    # DecodeArgs populates self._options and self._targets; must run before
    # anything below reads self._options.
    self.DecodeArgs(packages, args)
    self._build_once = once.Once(
        use_cached_results=self._options.use_cached_results,
        cache_results=self._options.cache_results,
        print_url=PrintAnnotatorURL,
        storage=self.CreateStorage())
    self._signature_file = None
    if self._options.emit_signatures is not None:
      if self._options.emit_signatures == '-':
        # '-' is conventional shorthand for "write signatures to stdout".
        self._signature_file = sys.stdout
      else:
        self._signature_file = open(self._options.emit_signatures, 'w')
125 """Main entry point."""
126 file_tools.MakeDirectoryIfAbsent(self._options.source)
127 file_tools.MakeDirectoryIfAbsent(self._options.output)
128 log_tools.SetupLogging(self._options.verbose,
129 open(os.path.join(self._options.output,
130 'toolchain_build.log'), 'w'))
133 def GetOutputDir(self, package, use_subdir):
134 # The output dir of source packages is in the source directory, and can be
136 if self._packages[package]['type'] == 'source':
137 dirname = self._packages[package].get('output_dirname', package)
138 return os.path.join(self._options.source, dirname)
140 root = os.path.join(self._options.output, package + '_install')
141 if use_subdir and 'output_subdir' in self._packages[package]:
142 return os.path.join(root, self._packages[package]['output_subdir'])
145 def BuildPackage(self, package):
146 """Build a single package.
148 Assumes dependencies of the package have been built.
150 package: Package to build.
153 package_info = self._packages[package]
155 # Validate the package description.
156 if 'type' not in package_info:
157 raise Exception('package %s does not have a type' % package)
158 type_text = package_info['type']
159 if type_text not in ('source', 'build'):
160 raise Execption('package %s has unrecognized type: %s' %
161 (package, type_text))
162 is_source_target = type_text == 'source'
164 if 'commands' not in package_info:
165 raise Exception('package %s does not have any commands' % package)
167 # Source targets are the only ones to run when doing sync-only.
168 if not is_source_target and self._options.sync_sources_only:
169 logging.debug('Build skipped: not running commands for %s' % package)
172 # Source targets do not run when skipping sync.
173 if is_source_target and not (
174 self._options.sync_sources or self._options.sync_sources_only):
175 logging.debug('Sync skipped: not running commands for %s' % package)
178 PrintFlush('@@@BUILD_STEP %s (%s)@@@' % (package, type_text))
179 logging.debug('Building %s package %s' % (type_text, package))
181 dependencies = package_info.get('dependencies', [])
183 # Collect a dict of all the inputs.
185 # Add in explicit inputs.
186 if 'inputs' in package_info:
187 for key, value in package_info['inputs'].iteritems():
188 if key in dependencies:
189 raise Exception('key "%s" found in both dependencies and inputs of '
190 'package "%s"' % (key, package))
193 inputs['src'] = os.path.join(self._options.source, package)
194 # Add in each dependency by package name.
195 for dependency in dependencies:
196 inputs[dependency] = self.GetOutputDir(dependency, True)
198 # Each package generates intermediate into output/<PACKAGE>_work.
199 # Clobbered here explicitly.
200 work_dir = os.path.join(self._options.output, package + '_work')
201 if self._options.clobber:
202 logging.debug('Clobbering working directory %s' % work_dir)
203 file_tools.RemoveDirectoryIfPresent(work_dir)
204 file_tools.MakeDirectoryIfAbsent(work_dir)
206 output = self.GetOutputDir(package, False)
207 output_subdir = self.GetOutputDir(package, True)
209 if not is_source_target or self._options.clobber_source:
210 logging.debug('Clobbering output directory %s' % output)
211 file_tools.RemoveDirectoryIfPresent(output)
212 os.makedirs(output_subdir)
214 commands = package_info.get('commands', [])
215 if not self._options.clobber and len(os.listdir(work_dir)) > 0:
216 commands = [cmd for cmd in commands if
217 not (hasattr(cmd, 'skip_for_incremental') and
218 cmd.skip_for_incremental)]
220 self._build_once.Run(
221 package, inputs, output,
223 working_dir=work_dir,
224 memoize=not is_source_target,
225 signature_file=self._signature_file,
226 subdir=output_subdir)
228 if not is_source_target and self._options.install:
229 logging.debug('Installing output to %s' % self._options.install)
230 file_tools.CopyTree(output, self._options.install)
232 def BuildOrder(self, targets):
233 """Find what needs to be built in what order to build all targets.
236 targets: A list of target packages to build.
238 A topologically sorted list of the targets plus their transitive
239 dependencies, in an order that will allow things to be built.
243 if self._options.ignore_dependencies:
245 def Add(target, target_path):
246 if target in order_set:
248 if target not in self._packages:
249 raise Exception('Unknown package %s' % target)
250 next_target_path = target_path + [target]
251 if target in target_path:
252 raise Exception('Dependency cycle: %s' % ' -> '.join(next_target_path))
253 for dependency in self._packages[target].get('dependencies', []):
254 Add(dependency, next_target_path)
256 order_set.add(target)
257 for target in targets:
262 """Build all packages selected and their dependencies."""
263 for target in self._targets:
264 self.BuildPackage(target)
266 def DecodeArgs(self, packages, args):
267 """Decode command line arguments to this build.
269 Populated self._options and self._targets.
271 packages: A list of package names to build.
272 args: sys.argv[1:] or equivalent.
274 package_list = sorted(packages.keys())
275 parser = optparse.OptionParser(
276 usage='USAGE: %prog [options] [targets...]\n\n'
277 'Available targets:\n' +
278 '\n'.join(textwrap.wrap(' '.join(package_list))))
280 '-v', '--verbose', dest='verbose',
281 default=False, action='store_true',
282 help='Produce more output.')
284 '-c', '--clobber', dest='clobber',
285 default=False, action='store_true',
286 help='Clobber working directories before building.')
288 '--cache', dest='cache',
289 default=DEFAULT_CACHE_DIR,
290 help='Select directory containing local storage cache.')
292 '-s', '--source', dest='source',
293 default=DEFAULT_SRC_DIR,
294 help='Select directory containing source checkouts.')
296 '-o', '--output', dest='output',
297 default=DEFAULT_OUT_DIR,
298 help='Select directory containing build output.')
300 '--no-use-cached-results', dest='use_cached_results',
301 default=True, action='store_false',
302 help='Do not rely on cached results.')
304 '--no-use-remote-cache', dest='use_remote_cache',
305 default=True, action='store_false',
306 help='Do not rely on non-local cached results.')
308 '--no-cache-results', dest='cache_results',
309 default=True, action='store_false',
310 help='Do not cache results.')
312 '--no-pinned', dest='pinned',
313 default=True, action='store_false',
314 help='Do not use pinned revisions.')
316 '--trybot', dest='trybot',
317 default=False, action='store_true',
318 help='Run and cache as if on trybot.')
320 '--buildbot', dest='buildbot',
321 default=False, action='store_true',
322 help='Run and cache as if on a non-trybot buildbot.')
324 '--clobber-source', dest='clobber_source',
325 default=False, action='store_true',
326 help='Clobber source directories before building')
328 '-y', '--sync', dest='sync_sources',
329 default=False, action='store_true',
330 help='Run source target commands')
332 '--sync-only', dest='sync_sources_only',
333 default=False, action='store_true',
334 help='Run source target commands only')
336 '--emit-signatures', dest='emit_signatures',
337 help='Write human readable build signature for each step to FILE.',
340 '-i', '--ignore-dependencies', dest='ignore_dependencies',
341 default=False, action='store_true',
342 help='Ignore target dependencies and build only the specified target.')
343 parser.add_option('--install', dest='install',
344 help='After building, copy contents of build packages' +
345 ' to the specified directory')
346 options, targets = parser.parse_args(args)
347 if options.trybot and options.buildbot:
348 print >>sys.stderr, (
349 'ERROR: Tried to run with both --trybot and --buildbot.')
351 if options.trybot or options.buildbot:
352 options.verbose = True
353 options.sync_sources = True
354 options.clobber = True
355 self._options = options
357 if self._options.ignore_dependencies:
358 print >>sys.stderr, (
359 'ERROR: A target must be specified if ignoring target dependencies')
361 targets = sorted(packages.keys())
362 targets = self.BuildOrder(targets)
363 self._targets = targets
365 def CreateStorage(self):
366 """Create a storage object for this build.
369 A storage object (GSDStorage).
371 if self._options.buildbot:
372 return gsd_storage.GSDStorage(
373 write_bucket='nativeclient-once',
374 read_buckets=['nativeclient-once'])
375 elif self._options.trybot:
376 return gsd_storage.GSDStorage(
377 write_bucket='nativeclient-once-try',
378 read_buckets=['nativeclient-once', 'nativeclient-once-try'])
381 if self._options.use_remote_cache:
382 read_buckets += ['nativeclient-once']
383 return local_storage_cache.LocalStorageCache(
384 cache_path=self._options.cache,
385 storage=gsd_storage.GSDStorage(
387 read_buckets=read_buckets))