2 # Copyright 2020 The Chromium Authors
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 """MB - the Meta-Build wrapper around GN.
8 MB is a wrapper script for GN that can be used to generate build files
9 for sets of canned configurations and analyze them.
# Absolute path of the Chromium src/ checkout: three dirname() hops up from
# this file (i.e. src/tools/mb/mb.py -> src/).
CHROMIUM_SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(
    os.path.abspath(__file__))))
# Make //build importable (presumably for gn_helpers, used below — confirm)
# and this script's parent directory importable (for the mb package).
sys.path = [os.path.join(CHROMIUM_SRC_DIR, 'build')] + sys.path
sys.path.insert(0, os.path.join(
    os.path.dirname(os.path.abspath(__file__)), '..'))
37 from mb.lib import validation
41 """Default mixin values"""
def PruneVirtualEnv():
  """Strips VirtualEnv/VPython traces out of os.environ, in place."""
  # Set by VirtualEnv, no need to keep it.
  os.environ.pop('VIRTUAL_ENV', None)

  # Set by VPython, if scripts want it back they have to set it explicitly.
  os.environ.pop('PYTHONNOUSERSITE', None)

  # Look for "activate_this.py" in this path, which is installed by VirtualEnv.
  # This mechanism is used by vpython as well to sanitize VirtualEnvs from
  # the environment.
  os.environ['PATH'] = os.pathsep.join([
      p for p in os.environ.get('PATH', '').split(os.pathsep)
      if not os.path.isfile(os.path.join(p, 'activate_this.py'))
  # NOTE(review): the closing `])` of this expression appears to be missing
  # from this extraction.
65 # Prune all evidence of VPython/VirtualEnv out of the environment. This means
66 # that we 'unwrap' vpython VirtualEnv path/env manipulation. Invocations of
67 # `python` from GN should never inherit the gn.py's own VirtualEnv. This also
68 # helps to ensure that generated ninja files do not reference python.exe from
69 # the VirtualEnv generated from depot_tools' own .vpython file (or lack
70 # thereof), but instead reference the default python from the PATH.
73 mbw = MetaBuildWrapper()
class MetaBuildWrapper:
  """Implements the mb tool: per-subcommand Cmd* methods plus shared state."""
  # NOTE(review): the `def __init__(self):` line and several continuation
  # lines appear to be missing from this extraction; the assignments below
  # are the constructor body.
  self.chromium_src_dir = CHROMIUM_SRC_DIR
  # Default locations for the config file and isolate map; overridable via
  # the -f / -i command-line flags.
  self.default_config = os.path.join(self.chromium_src_dir, 'tools', 'mb',
  self.default_isolate_map = os.path.join(self.chromium_src_dir, 'testing',
                                          'buildbot', 'gn_isolate_map.pyl')
  self.executable = sys.executable
  self.platform = sys.platform
  self.args = argparse.Namespace()
  # Populated by ReadConfigFile().
  self.public_artifact_builders = None
  self.gn_args_locations_files = []
  self.builder_groups = {}
  # luci-go isolate binary name is platform-dependent (expression truncated
  # in this extraction).
  self.isolate_exe = 'isolate.exe' if self.platform.startswith(
  self.use_luci_auth = False
def PostArgsInit(self):
  """Finishes argument setup after parsing.

  Applies defaults that depend on other parsed values: --luci-auth,
  the config file location, and the expectations directory (which
  defaults to a sibling of the config file).
  """
  args = self.args
  self.use_luci_auth = getattr(args, 'luci_auth', False)

  if 'config_file' in args and args.config_file is None:
    args.config_file = self.default_config

  if 'expectations_dir' in args and args.expectations_dir is None:
    config_dir = os.path.dirname(args.config_file)
    args.expectations_dir = os.path.join(config_dir,
                                         'mb_config_expectations')
def Main(self, args):
  """Entry point: run the selected subcommand and map errors to exit codes.

  NOTE(review): physical lines are missing from this extraction — the
  argument-parsing calls, the `try:` opener matching the `except` below,
  and the return statements among them.
  """
    ret = self.args.func()
    self.DumpInputFiles()
  except KeyboardInterrupt:
    self.Print('interrupted, exiting')
    self.DumpInputFiles()
    # Print the traceback line by line (presumably via self.Print — the
    # loop body is missing from this extraction).
    s = traceback.format_exc()
    for l in s.splitlines():
def ParseArgs(self, argv):
  """Builds the mb command-line parser and parses argv into self.args.

  Each subcommand's subparser sets a `func` default pointing at the
  matching Cmd* method, which Main() later invokes.

  NOTE(review): a number of physical lines are missing from this
  extraction, so several call expressions below are visibly truncated
  (e.g. some add_argument calls are missing their flag-name arguments).
  """
  def AddCommonOptions(subp):
    # Options shared by most subcommands. -m/-b and -c are mutually
    # exclusive ways of naming a configuration.
    group = subp.add_mutually_exclusive_group()
        '-m', '--builder-group',
        help='builder group name to look up config from')
    subp.add_argument('-b', '--builder',
                      help='builder name to look up config from')
    subp.add_argument('-c', '--config',
                      help='configuration to analyze')
    subp.add_argument('--phase',
                      help='optional phase name (used when builders '
                           'do multiple compiles with different '
                           'arguments in a single build)')
    subp.add_argument('-i', '--isolate-map-file', metavar='PATH',
                      help='path to isolate map file '
                           '(default is %(default)s)',
                      dest='isolate_map_files')
    subp.add_argument('-n', '--dryrun', action='store_true',
                      help='Do a dry run (i.e., do nothing, just print '
                           'the commands that will run)')
    subp.add_argument('-v', '--verbose', action='store_true',
                      help='verbose logging')
    subp.add_argument('--root', help='Path to GN source root')
    subp.add_argument('--dotfile', help='Path to GN dotfile')
    AddExpansionOptions(subp)

  def AddExpansionOptions(subp):
    # These are the args needed to expand a config file into the full
    # parsed dicts of GN args.
    subp.add_argument('-f',
                      help=('path to config file '
                            '(default is mb_config.pyl'))
    subp.add_argument('-g', '--goma-dir', help='path to goma directory')
    subp.add_argument('--android-version-code',
                      help='Sets GN arg android_default_version_code')
    subp.add_argument('--android-version-name',
                      help='Sets GN arg android_default_version_name')

    # TODO(crbug.com/1060857): Remove this once swarming task templates
    # support command prefixes.
    luci_auth_group = subp.add_mutually_exclusive_group()
    luci_auth_group.add_argument(
        help='Run isolated commands under `luci-auth context`.')
    luci_auth_group.add_argument(
        action='store_false',
        help='Do not run isolated commands under `luci-auth context`.')

  parser = argparse.ArgumentParser(
      prog='mb', description='mb (meta-build) is a python wrapper around GN. '
                             'See the user guide in '
                             '//tools/mb/docs/user_guide.md for detailed usage '
  subps = parser.add_subparsers()

  # -- analyze --
  subp = subps.add_parser('analyze',
                          description='Analyze whether changes to a set of '
                                      'files will cause a set of binaries to '
  AddCommonOptions(subp)
  subp.add_argument('path',
                    help='path build was generated into.')
  subp.add_argument('input_path',
                    help='path to a file containing the input arguments '
  subp.add_argument('output_path',
                    help='path to a file containing the output arguments '
  subp.add_argument('--json-output',
                    help='Write errors to json.output')
  subp.set_defaults(func=self.CmdAnalyze)

  # -- export --
  subp = subps.add_parser('export',
                          description='Print out the expanded configuration '
                                      'for each builder as a JSON object.')
  AddExpansionOptions(subp)
  subp.set_defaults(func=self.CmdExport)

  # -- get-swarming-command --
  subp = subps.add_parser('get-swarming-command',
                          description='Get the command needed to run the '
                                      'binary under swarming')
  AddCommonOptions(subp)
  subp.add_argument('--no-build',
                    action='store_false',
                    help='Do not build, just isolate')
  subp.add_argument('--as-list',
                    help='return the command line as a JSON-formatted '
                         'list of strings instead of single string')
  subp.add_argument('path',
                    help=('path to generate build into (or use).'
                          ' This can be either a regular path or a '
                          'GN-style source-relative path like '
  subp.add_argument('target', help='ninja target to build and run')
  subp.set_defaults(func=self.CmdGetSwarmingCommand)

  # -- train --
  subp = subps.add_parser('train',
                          description='Writes the expanded configuration '
                                      'for each builder as JSON files to a '
                                      'configured '
  subp.add_argument('-f',
                    help='path to config file (default is mb_config.pyl')
  subp.add_argument('--expectations-dir',
                    help='path to dir containing expectation files')
  subp.add_argument('-n',
                    help='Do a dry run (i.e., do nothing, just print '
                         'the commands that will run)')
  subp.add_argument('-v',
                    help='verbose logging')
  subp.set_defaults(func=self.CmdTrain)

  # -- gen --
  subp = subps.add_parser('gen',
                          description='Generate a new set of build files.')
  AddCommonOptions(subp)
  subp.add_argument('--swarming-targets-file',
                    help='generates runtime dependencies for targets listed '
                         'in file as .isolate and .isolated.gen.json files. '
                         'Targets should be listed by name, separated by '
  subp.add_argument('--json-output',
                    help='Write errors to json.output')
  subp.add_argument('path',
                    help='path to generate build into')
  subp.set_defaults(func=self.CmdGen)

  # -- isolate-everything --
  subp = subps.add_parser('isolate-everything',
                          description='Generates a .isolate for all targets. '
                                      'Requires that mb.py gen has already '
  AddCommonOptions(subp)
  subp.set_defaults(func=self.CmdIsolateEverything)
  subp.add_argument('path',
                    help='path build was generated into')

  # -- isolate --
  subp = subps.add_parser('isolate',
                          description='Generate the .isolate files for a '
  AddCommonOptions(subp)
  subp.add_argument('--no-build', dest='build', default=True,
                    action='store_false',
                    help='Do not build, just isolate')
  subp.add_argument('-j', '--jobs', type=int,
                    help='Number of jobs to pass to ninja')
  subp.add_argument('path',
                    help='path build was generated into')
  subp.add_argument('target',
                    help='ninja target to generate the isolate for')
  subp.set_defaults(func=self.CmdIsolate)

  # -- lookup --
  subp = subps.add_parser('lookup',
                          description='Look up the command for a given '
                                      'config or builder.')
  AddCommonOptions(subp)
  subp.add_argument('--quiet', default=False, action='store_true',
                    help='Print out just the arguments, '
                         'do not emulate the output of the gen subcommand.')
  subp.add_argument('--recursive', default=False, action='store_true',
                    help='Lookup arguments from imported files, '
  subp.set_defaults(func=self.CmdLookup)

  # -- run (uses a raw-description formatter so the example text below keeps
  # its line breaks) --
  subp = subps.add_parser(
      'run', formatter_class=argparse.RawDescriptionHelpFormatter)
      'Build, isolate, and run the given binary with the command line\n'
      'listed in the isolate. You may pass extra arguments after the\n'
      'target; use "--" if the extra arguments need to include switches.\n'
      ' % tools/mb/mb.py run -m chromium.linux -b "Linux Builder" \\\n'
      ' //out/Default content_browsertests\n'
      ' % tools/mb/mb.py run out/Default content_browsertests\n'
      ' % tools/mb/mb.py run out/Default content_browsertests -- \\\n'
      ' --test-launcher-retry-limit=0'
  AddCommonOptions(subp)
  subp.add_argument('-j', '--jobs', type=int,
                    help='Number of jobs to pass to ninja')
  subp.add_argument('--no-build', dest='build', default=True,
                    action='store_false',
                    help='Do not build, just isolate and run')
  subp.add_argument('path',
                    help=('path to generate build into (or use).'
                          ' This can be either a regular path or a '
                          'GN-style source-relative path like '
  subp.add_argument('-s', '--swarmed', action='store_true',
                    help='Run under swarming with the default dimensions')
  subp.add_argument('-d', '--dimension', default=[], action='append', nargs=2,
                    dest='dimensions', metavar='FOO bar',
                    help='dimension to filter on')
  subp.add_argument('--internal',
                    help=('Run under the internal swarming server '
                          '(chrome-swarming) instead of the public server '
                          '(chromium-swarm).'))
  subp.add_argument('--no-bot-mode',
                    action='store_false',
                    help='Do not run the test with bot mode.')
  subp.add_argument('--realm',
                    help=('Optional realm used when triggering swarming '
  subp.add_argument('--service-account',
                    help=('Optional service account to run the swarming '
  subp.add_argument('--tags', default=[], action='append', metavar='FOO:BAR',
                    help='Tags to assign to the swarming task')
  subp.add_argument('--no-default-dimensions', action='store_false',
                    dest='default_dimensions', default=True,
                    help='Do not automatically add dimensions to the task')
  subp.add_argument('target',
                    help='ninja target to build and run')
  subp.add_argument('extra_args', nargs='*',
                    help=('extra args to pass to the isolate to run. Use '
                          '"--" as the first arg if you need to pass '
  subp.set_defaults(func=self.CmdRun)

  # -- validate --
  subp = subps.add_parser('validate',
                          description='Validate the config file.')
  AddExpansionOptions(subp)
  subp.add_argument('--expectations-dir',
                    help='path to dir containing expectation files')
  subp.add_argument('--skip-dcheck-check',
                    help='Skip check for dcheck_always_on.',
  subp.set_defaults(func=self.CmdValidate)

  # -- zip --
  subp = subps.add_parser('zip',
                          description='Generate a .zip containing the files '
                                      'needed for a given binary.')
  AddCommonOptions(subp)
  subp.add_argument('--no-build', dest='build', default=True,
                    action='store_false',
                    help='Do not build, just isolate')
  subp.add_argument('-j', '--jobs', type=int,
                    help='Number of jobs to pass to ninja')
  subp.add_argument('path',
                    help='path build was generated into')
  subp.add_argument('target',
                    help='ninja target to generate the isolate for')
  subp.add_argument('zip_path',
                    help='path to zip file to create')
  subp.set_defaults(func=self.CmdZip)

  # -- help --
  subp = subps.add_parser('help',
                          help='Get help on a subcommand.')
  subp.add_argument(nargs='?', action='store', dest='subcommand',
                    help='The command to get help for.')
  subp.set_defaults(func=self.CmdHelp)

  self.args = parser.parse_args(argv)
def DumpInputFiles(self):
  """Echoes the contents of the input files, for debugging/reproduction.

  NOTE(review): some lines are missing from this extraction (e.g. the
  loop that actually prints `contents`).
  """
  def DumpContentsOfFilePassedTo(arg_name, path):
    # Print the file as a reproducible `cat > path <<EOF` shell snippet.
    if path and self.Exists(path):
      self.Print("\n# To recreate the file passed to %s:" % arg_name)
      self.Print("%% cat > %s <<EOF" % path)
      contents = self.ReadFile(path)
      self.Print("EOF\n%\n")

  if getattr(self.args, 'input_path', None):
    DumpContentsOfFilePassedTo(
        'argv[0] (input_path)', self.args.input_path)
  if getattr(self.args, 'swarming_targets_file', None):
    DumpContentsOfFilePassedTo(
        '--swarming-targets-file', self.args.swarming_targets_file)
def CmdAnalyze(self):
  """Implements `mb analyze` by delegating to RunGNAnalyze."""
  # NOTE(review): the line computing `vals` (presumably via GetConfig())
  # is missing from this extraction.
  return self.RunGNAnalyze(vals)
427 obj = self._ToJsonish()
428 s = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
433 expectations_dir = self.args.expectations_dir
434 if not self.Exists(expectations_dir):
435 self.Print('Expectations dir (%s) does not exist.' % expectations_dir)
437 # Removing every expectation file then immediately re-generating them will
438 # clear out deleted groups.
439 for f in self.ListDir(expectations_dir):
440 self.RemoveFile(os.path.join(expectations_dir, f))
441 obj = self._ToJsonish()
442 for builder_group, builder in sorted(obj.items()):
443 expectation_file = os.path.join(expectations_dir, builder_group + '.json')
444 json_s = json.dumps(builder,
447 separators=(',', ': '))
448 self.WriteFile(expectation_file, json_s)
453 return self.RunGNGen(vals)
def CmdGetSwarmingCommand(self):
  """Implements `mb get-swarming-command`: prints the swarming command
  for the target, either as a JSON list (--as-list) or a single string."""
  vals = self.GetConfig()
  command, _ = self.GetSwarmingCommand(self.args.target, vals)
  if self.args.as_list:
    self.Print(json.dumps(command))
  # NOTE(review): the `else:` line and a trailing return appear to be
  # missing from this extraction.
    self.Print(' '.join(command))
def CmdIsolateEverything(self):
  """Implements `mb isolate-everything` by delegating to
  RunGNGenAllIsolates."""
  # NOTE(review): the line computing `vals` is missing from this
  # extraction.
  return self.RunGNGenAllIsolates(vals)
469 if self.args.subcommand:
470 self.ParseArgs([self.args.subcommand, '--help'])
472 self.ParseArgs(['--help'])
def CmdIsolate(self):
  """Implements `mb isolate`: optionally build, then write the .isolate."""
  vals = self.GetConfig()
  # NOTE(review): lines are missing from this extraction (e.g. the guard
  # around the build step and the error check of `ret`).
    ret = self.Build(self.args.target)
  return self.RunGNIsolate(vals)
486 _, gn_args = self.GNArgs(vals, expand_imports=self.args.recursive)
487 if self.args.quiet or self.args.recursive:
488 self.Print(gn_args, end='')
490 cmd = self.GNCmd('gen', '_path_')
491 self.Print('\nWriting """\\\n%s""" to _path_/args.gn.\n' % gn_args)
496 vals = self.GetConfig()
501 ret = self.Build(self.args.target)
506 ret = self.RunGNIsolate(vals)
511 if self.args.swarmed:
512 cmd, _ = self.GetSwarmingCommand(self.args.target, vals)
513 return self._RunUnderSwarming(self.args.path, self.args.target, cmd,
515 return self._RunLocallyIsolated(self.args.path, self.args.target)
518 ret = self.CmdIsolate()
524 zip_dir = self.TempDir()
526 self.PathJoin(self.chromium_src_dir, 'tools', 'luci-go',
527 self.isolate_exe), 'remap', '-i',
528 self.PathJoin(self.args.path, self.args.target + '.isolate'),
531 ret, _, _ = self.Run(remap_cmd)
535 zip_path = self.args.zip_path
536 with zipfile.ZipFile(
537 zip_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) as fp:
538 for root, _, files in os.walk(zip_dir):
539 for filename in files:
540 path = self.PathJoin(root, filename)
541 fp.write(path, self.RelPath(path, zip_dir))
545 self.RemoveDirectory(zip_dir)
def _RunUnderSwarming(self, build_dir, target, isolate_cmd, internal):
  """Archives the isolate to CAS, triggers a swarming task, collects it.

  `internal` selects the chrome-swarming server/instance and realm;
  otherwise the public chromium-swarm server is used.

  NOTE(review): many physical lines are missing from this extraction —
  the internal/public `if`/`else` around the server settings, list
  initializations, portions of the command lists, and the error-handling
  branches among them.
  """
    cas_instance = 'chrome-swarming'
    swarming_server = 'chrome-swarming.appspot.com'
    realm = 'chrome:try' if not self.args.realm else self.args.realm
    account = 'chrome-tester@chops-service-accounts.iam.gserviceaccount.com'
    cas_instance = 'chromium-swarm'
    swarming_server = 'chromium-swarm.appspot.com'
    realm = self.args.realm
    account = 'chromium-tester@chops-service-accounts.iam.gserviceaccount.com'
  # An explicit --service-account always wins over the per-server default.
  account = (self.args.service_account
             if self.args.service_account else account)
  # TODO(dpranke): Look up the information for the target in
  # the //testing/buildbot.json file, if possible, so that we
  # can determine the isolate target, command line, and additional
  # swarming parameters, if possible.
  #
  # TODO(dpranke): Also, add support for sharding and merging results.
  for k, v in self._DefaultDimensions() + self.args.dimensions:
    dimensions += ['-d', '%s=%s' % (k, v)]

  archive_json_path = self.ToSrcRelPath(
      '%s/%s.archive.json' % (build_dir, target))
      self.PathJoin(self.chromium_src_dir, 'tools', 'luci-go',
      self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),

  # Talking to the isolateserver may fail because we're not logged in.
  # We trap the command explicitly and rewrite the error output so that
  # the error message is actually correct for a Chromium check out.
  ret, out, _ = self.Run(cmd, force_verbose=False)
    self.Print(' -> returned %d' % ret)
    self.Print(out, end='')

  archive_hashes = json.loads(self.ReadFile(archive_json_path))
      'Failed to read JSON file "%s"' % archive_json_path, file=sys.stderr)
  cas_digest = archive_hashes[target]
      'Cannot find hash for "%s" in "%s", file content: %s' %
      (target, archive_json_path, archive_hashes),

  tags = ['-tag=%s' % tag for tag in self.args.tags]

  json_dir = self.TempDir()
  json_file = self.PathJoin(json_dir, 'task.json')
      self.PathJoin('tools', 'luci-go', 'swarming'),
      # 30 is try level. So use the same here.
      '-tag=purpose:user-debug-mb',
      self.ToSrcRelPath(build_dir),
    cmd += ['--realm', realm]
  cmd += tags + dimensions + ['--'] + list(isolate_cmd)
  if self.args.extra_args:
    cmd += self.args.extra_args
  ret, _, _ = self.Run(cmd, force_verbose=True, capture_output=False)

  # Collect the triggered task's result using the task id recorded in the
  # trigger's task.json output.
  task_json = self.ReadFile(json_file)
  task_id = json.loads(task_json)["tasks"][0]['task_id']
  collect_output = self.PathJoin(json_dir, 'collect_output.json')
      self.PathJoin('tools', 'luci-go', 'swarming'),
      '-task-output-stdout=console',
      '-task-summary-json',
  ret, _, _ = self.Run(cmd, force_verbose=True, capture_output=False)
  collect_json = json.loads(self.ReadFile(collect_output))
  # The exit_code field might not be included if the task was successful.
      collect_json.get(task_id, {}).get('results', {}).get('exit_code', 0))
  self.RemoveDirectory(json_dir)
def _RunLocallyIsolated(self, build_dir, target):
  """Runs the target locally via the luci-go isolate tool.

  NOTE(review): the start of the command list and the trailing return of
  `ret` are missing from this extraction.
  """
      self.PathJoin(self.chromium_src_dir, 'tools', 'luci-go',
      self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
  if self.args.extra_args:
    cmd += ['--'] + self.args.extra_args
  ret, _, _ = self.Run(cmd, force_verbose=True, capture_output=False)
def _DefaultDimensions(self):
  """Returns default swarming dimensions as [(key, value), ...] tuples.

  Honors --no-default-dimensions (the early-return line is missing from
  this extraction). Raises MBErr on an unrecognized platform.
  """
  if not self.args.default_dimensions:
  # This code is naive and just picks reasonable defaults per platform.
  if self.platform == 'darwin':
    os_dim = ('os', 'Mac-10.13')
  elif self.platform.startswith('linux'):
    os_dim = ('os', 'Ubuntu-16.04')
  elif self.platform == 'win32':
    os_dim = ('os', 'Windows-10')
    raise MBErr('unrecognized platform string "%s"' % self.platform)

  return [('pool', 'chromium.tests'),
def _ToJsonish(self):
  """Dumps the config file into a json-friendly expanded dict.

  Returns:
    A dict with builder group -> builder -> all GN args mapping.

  NOTE(review): several lines are missing from this extraction — the
  result-dict initialization, what appears to be a nested `flatten`
  helper's def line, error handling, and the final return among them.
  """
  self.ReadConfigFile(self.args.config_file)
  for builder_group, builders in self.builder_groups.items():
    obj[builder_group] = {}
    for builder in builders:
      config = self.builder_groups[builder_group][builder]
        # Expand mixins into a single flat config, then parse its GN args
        # string into a dict.
        flattened_config = FlattenConfig(self.configs, self.mixins, config)
        if flattened_config['gn_args'] == 'error':
        args = {'gn_args': gn_helpers.FromGNArgs(flattened_config['gn_args'])}
        if flattened_config.get('args_file'):
          args['args_file'] = flattened_config['args_file']
      if isinstance(config, dict):
        # This is a 'phased' builder. Each key in the config is a different
        # phase of the builder.
        for k, v in config.items():
          flattened = flatten(v)
          if flattened is None:
      elif config.startswith('//'):
        args = flatten(config)
      obj[builder_group][builder] = args
def CmdValidate(self, print_ok=True):
  """Implements `mb validate`: runs consistency checks over the config.

  Raises MBErr if any check fails; otherwise optionally prints an
  all-clear message. NOTE(review): some lines are missing from this
  extraction (e.g. the `errs` list initialization, `if errs:` guards,
  and return statements).
  """
  self.ReadConfigFile(self.args.config_file)

  # Build a list of all of the configs referenced by builders.
  all_configs = validation.GetAllConfigs(self.builder_groups)

  # Check that every referenced args file or config actually exists.
  for config, loc in all_configs.items():
    if config.startswith('//'):
      if not self.Exists(self.ToAbsPath(config)):
        errs.append('Unknown args file "%s" referenced from "%s".' %
    elif not config in self.configs:
      errs.append('Unknown config "%s" referenced from "%s".' %

  # Check that every config and mixin is referenced.
  validation.CheckAllConfigsAndMixinsReferenced(errs, all_configs,
                                                self.configs, self.mixins)

  # The proprietary-mixin check only applies to the default config file.
  if self.args.config_file == self.default_config:
    validation.EnsureNoProprietaryMixins(errs, self.builder_groups,
                                         self.configs, self.mixins)

  validation.CheckDuplicateConfigs(errs, self.configs, self.mixins,
                                   self.builder_groups, FlattenConfig)

  if not self.args.skip_dcheck_check:
    self._ValidateEach(errs, validation.CheckDebugDCheckOrOfficial)

    raise MBErr(('mb config file %s has problems:\n ' %
                 self.args.config_file) + '\n '.join(errs))

  expectations_dir = self.args.expectations_dir
  # TODO(crbug.com/1117577): Force all versions of mb_config.pyl to have
  # expectations. For now, just ignore those that don't have them.
  if self.Exists(expectations_dir):
    jsonish_blob = self._ToJsonish()
    if not validation.CheckExpectations(self, jsonish_blob, expectations_dir):
      raise MBErr("Expectations out of date. Run 'tools/mb/mb.py train'.")

  validation.CheckKeyOrdering(errs, self.builder_groups, self.configs,
    raise MBErr('mb config file not sorted:\n' + '\n'.join(errs))

    self.Print('mb config file %s looks ok.' % self.args.config_file)
def _ValidateEach(self, errs, validate):
  """Checks a validate function against every builder config.

  This loops over all the builders in the config file, invoking the
  validate function against the full set of GN args. Any errors found
  should be appended to the errs list passed in; the validation
  function signature is

    validate(errs:list, gn_args:dict, builder_group:str, builder:str,
             phase:(str|None))

  NOTE(review): `try:`/`except` lines around the GNArgs calls and the
  `else:` for the non-phased branch appear to be missing from this
  extraction.
  """
  for builder_group, builders in self.builder_groups.items():
    for builder, config in builders.items():
      if isinstance(config, dict):
        # Phased builder: validate each phase's flattened config.
        for phase, phase_config in config.items():
          vals = FlattenConfig(self.configs, self.mixins, phase_config)
          if vals['gn_args'] == 'error':
            parsed_gn_args, _ = self.GNArgs(vals, expand_imports=True)
            # The builder must use an args file that was not checked out or
            # generated, so we should just ignore it.
            parsed_gn_args, _ = self.GNArgs(vals, expand_imports=False)
          validate(errs, parsed_gn_args, builder_group, builder, phase)
        # Non-phased builder: validate the single flattened config.
        vals = FlattenConfig(self.configs, self.mixins, config)
        if vals['gn_args'] == 'error':
          parsed_gn_args, _ = self.GNArgs(vals, expand_imports=True)
        # The builder must use an args file that was not checked out or
        # generated, so we should just ignore it.
        parsed_gn_args, _ = self.GNArgs(vals, expand_imports=False)
        validate(errs, parsed_gn_args, builder_group, builder, phase=None)
832 build_dir = self.args.path
835 if self.args.builder or self.args.builder_group or self.args.config:
837 # Re-run gn gen in order to ensure the config is consistent with the
842 toolchain_path = self.PathJoin(self.ToAbsPath(build_dir),
844 if not self.Exists(toolchain_path):
845 self.Print('Must either specify a path to an existing GN build dir '
846 'or pass in a -m/-b pair or a -c flag to specify the '
850 vals['gn_args'] = self.GNArgsFromDir(build_dir)
def GNArgsFromDir(self, build_dir):
  """Reads build_dir/args.gn and returns it as a flat key=value string.

  Any `import(...)` lines are spliced in recursively (see ReplaceImports
  below) before parsing with gn_helpers.FromGNArgs.

  NOTE(review): lines are missing from this extraction — e.g. the
  fallback initialization of `args_contents` when args.gn does not
  exist, and ReplaceImports' accumulator init and `else` arm.
  """
  gn_args_path = self.PathJoin(self.ToAbsPath(build_dir), 'args.gn')
  if self.Exists(gn_args_path):
    args_contents = self.ReadFile(gn_args_path)

  # Handle any .gni file imports, e.g. the ones used by CrOS. This should
  # be automatically handled by gn_helpers.FromGNArgs (via its call to
  # gn_helpers.GNValueParser.ReplaceImports), but that currently breaks
  # mb_unittest since it mocks out file reads itself instead of using
  # pyfakefs. This results in gn_helpers trying to read a non-existent file.
  # The implementation of ReplaceImports here can be removed once the
  # unittests use pyfakefs.
  def ReplaceImports(input_contents):
    # Recursively replace each import(...) line with the imported file's
    # (also import-expanded) contents.
    for l in input_contents.splitlines(True):
      if not l.strip().startswith('#') and 'import(' in l:
        import_file = l.split('"', 2)[1]
        import_file = self.ToAbsPath(import_file)
        imported_contents = self.ReadFile(import_file)
        output_contents += ReplaceImports(imported_contents) + '\n'
    return output_contents

  args_contents = ReplaceImports(args_contents)
  args_dict = gn_helpers.FromGNArgs(args_contents)
  return self._convert_args_dict_to_args_string(args_dict)
882 def _convert_args_dict_to_args_string(self, args_dict):
883 """Format a dict of GN args into a single string."""
884 for k, v in args_dict.items():
885 if isinstance(v, str):
886 # Re-add the quotes around strings so they show up as they would in the
888 args_dict[k] = '"%s"' % v
889 elif isinstance(v, bool):
890 # Convert boolean values to lower case strings.
891 args_dict[k] = str(v).lower()
892 return ' '.join(['%s=%s' % (k, v) for (k, v) in args_dict.items()])
895 self.ReadConfigFile(self.args.config_file)
897 config = self.ConfigFromArgs()
899 # TODO(crbug.com/912681) While iOS bots are migrated to use the
900 # Chromium recipe, we want to ensure that we're checking MB's
901 # configurations first before going to iOS.
902 # This is to be removed once the migration is complete.
903 vals = self.ReadIOSBotConfig()
908 # "config" would be a dict if the GN args are loaded from a
909 # starlark-generated file.
910 if isinstance(config, dict):
913 # TODO(crbug.com/912681) Some iOS bots have a definition, with ios_error
914 # as an indicator that it's incorrect. We utilize this to check the
915 # iOS JSON instead, and error out if there exists no definition at all.
916 # This is to be removed once the migration is complete.
917 if config == 'ios_error':
918 vals = self.ReadIOSBotConfig()
920 raise MBErr('No iOS definition was found. Please ensure there is a '
921 'definition for the given iOS bot under '
922 'mb_config.pyl or a JSON file definition under '
926 if config.startswith('//'):
927 if not self.Exists(self.ToAbsPath(config)):
928 raise MBErr('args file "%s" not found' % config)
930 vals['args_file'] = config
932 if not config in self.configs:
934 'Config "%s" not found in %s' % (config, self.args.config_file))
935 vals = FlattenConfig(self.configs, self.mixins, config)
def ReadIOSBotConfig(self):
  """Looks up GN args for an iOS bot from //ios/build/bots JSON configs.

  NOTE(review): lines are missing from this extraction — the early
  returns (presumably returning {} when -m/-b are unset or no JSON file
  exists), the initialization of `vals`, and the final return.
  """
  if not self.args.builder_group or not self.args.builder:
  path = self.PathJoin(self.chromium_src_dir, 'ios', 'build', 'bots',
                       self.args.builder_group, self.args.builder + '.json')
  if not self.Exists(path):

  contents = json.loads(self.ReadFile(path))
  # The JSON stores gn_args as a list of 'key=value' strings.
  gn_args = ' '.join(contents.get('gn_args', []))
  vals['gn_args'] = gn_args
def ReadConfigFile(self, config_file):
  """Parses config_file (a Python-literal .pyl) and populates the
  configs/mixins/builder-group state on self.

  Raises MBErr if the file is missing or unparsable. NOTE(review): the
  `try:` line before the literal_eval appears to be missing from this
  extraction.
  """
  if not self.Exists(config_file):
    raise MBErr('config file not found at %s' % config_file)

    # literal_eval only evaluates Python literals — no code execution.
    contents = ast.literal_eval(self.ReadFile(config_file))
  except SyntaxError as e:
    raise MBErr('Failed to parse config file "%s": %s' %
                (config_file, e)) from e

  self.configs = contents['configs']
  self.mixins = contents['mixins']
  self.gn_args_locations_files = contents.get('gn_args_locations_files', [])
  self.builder_groups = contents.get('builder_groups')
  self.public_artifact_builders = contents.get('public_artifact_builders')
def ReadIsolateMap(self):
  """Merges all -i isolate map files into one dict, rejecting duplicate
  target entries.

  NOTE(review): lines are missing from this extraction — the accumulator
  initialization, a `try:` opener, the duplicate-check `if`/raise, and
  the final return among them.
  """
  if not self.args.isolate_map_files:
    self.args.isolate_map_files = [self.default_isolate_map]

  for f in self.args.isolate_map_files:
    if not self.Exists(f):
      raise MBErr('isolate map file not found at %s' % f)
  for isolate_map in self.args.isolate_map_files:
      # Each map file is a Python literal dict of target -> info.
      isolate_map = ast.literal_eval(self.ReadFile(isolate_map))
      duplicates = set(isolate_map).intersection(isolate_maps)
        'Duplicate targets in isolate map files: %s.' %
        ', '.join(duplicates))
      isolate_maps.update(isolate_map)
    except SyntaxError as e:
      raise MBErr('Failed to parse isolate map file "%s": %s' %
                  (isolate_map, e)) from e
def ConfigFromArgs(self):
  """Resolves -c/--config or (-m, -b) into a config name (or phase entry).

  Resolution order: an explicit -c/--config wins; otherwise the builder
  is looked up first in the starlark-generated gn-args.json location
  files, then in the mb_config.pyl builder groups.

  NOTE(review): several lines are missing from this extraction — the
  `if self.args.config:` guard, loop `continue`s, the return of the
  gn-args-file result, and the final `return config` among them.
  """
  if self.args.builder_group or self.args.builder:
    # NOTE(review): 'specific' below is a typo for 'specify' in the error
    # message; not fixable in a comments-only edit.
    raise MBErr('Can not specific both -c/--config and --builder-group '
    return self.args.config

  if not self.args.builder_group or not self.args.builder:
    raise MBErr('Must specify either -c/--config or '
                '(--builder-group and -b/--builder)')

  # Try finding gn-args.json generated by starlark definition.
  for gn_args_locations_file in self.gn_args_locations_files:
    locations_file_abs_path = os.path.join(
        os.path.dirname(self.args.config_file),
        os.path.normpath(gn_args_locations_file))
    if not self.Exists(locations_file_abs_path):
    gn_args_locations = json.loads(self.ReadFile(locations_file_abs_path))
    gn_args_file = gn_args_locations.get(self.args.builder_group,
                                         {}).get(self.args.builder, None)
    gn_args_dict = json.loads(
        os.path.join(os.path.dirname(locations_file_abs_path),
                     os.path.normpath(gn_args_file))))
    if 'phases' in gn_args_dict:
      # The builder has phased GN config.
      if self.args.phase is None:
        raise MBErr('Must specify a build --phase for %s on %s' %
                    (self.args.builder, self.args.builder_group))
      phase = str(self.args.phase)
      phase_configs = gn_args_dict['phases']
      if phase not in phase_configs:
        raise MBErr('Phase %s doesn\'t exist for %s on %s' %
                    (phase, self.args.builder, self.args.builder_group))
      gn_args_dict = phase_configs[phase]
      # Non-phased GN config.
      if self.args.phase is not None:
        raise MBErr('Must not specify a build --phase for %s on %s' %
                    (self.args.builder, self.args.builder_group))
        gn_args_dict.get('args_file', ''),
        self._convert_args_dict_to_args_string(
            gn_args_dict.get('gn_args', {})) or ''

  # Fall back to the mb_config.pyl builder groups.
  if not self.args.builder_group in self.builder_groups:
    raise MBErr('Builder group name "%s" not found in "%s"' %
                (self.args.builder_group, self.args.config_file))

  if not self.args.builder in self.builder_groups[self.args.builder_group]:
    raise MBErr('Builder name "%s" not found under groups[%s] in "%s"' %
                (self.args.builder, self.args.builder_group,
                 self.args.config_file))

  config = self.builder_groups[self.args.builder_group][self.args.builder]
  if isinstance(config, dict):
    # Phased config in mb_config.pyl: select the phase named by --phase.
    if self.args.phase is None:
      raise MBErr('Must specify a build --phase for %s on %s' %
                  (self.args.builder, self.args.builder_group))
    phase = str(self.args.phase)
    if phase not in config:
      raise MBErr('Phase %s doesn\'t exist for %s on %s' %
                  (phase, self.args.builder, self.args.builder_group))
    return config[phase]

  if self.args.phase is not None:
    raise MBErr('Must not specify a build --phase for %s on %s' %
                (self.args.builder, self.args.builder_group))
def RunGNGen(self, vals, compute_inputs_for_analyze=False, check=True):
  """Runs `gn gen` for self.args.path, optionally emitting swarming
  isolates for the targets listed in --swarming-targets-file.

  NOTE(review): lines are missing from this extraction — the
  `if check:`/`else:` around the two GNCmd calls, the `if ret != 0:`
  guard after Run(), and the returns among them.
  """
  build_dir = self.args.path

    cmd = self.GNCmd('gen', build_dir, '--check')
    cmd = self.GNCmd('gen', build_dir)
  _, gn_args = self.GNArgs(vals)
  if compute_inputs_for_analyze:
    gn_args += ' compute_inputs_for_analyze=true'

  # Since GN hasn't run yet, the build directory may not even exist.
  self.MaybeMakeDirectory(self.ToAbsPath(build_dir))

  gn_args_path = self.ToAbsPath(build_dir, 'args.gn')
  self.WriteFile(gn_args_path, gn_args, force_verbose=True)

  if getattr(self.args, 'swarming_targets_file', None):
    # We need GN to generate the list of runtime dependencies for
    # the compile targets listed (one per line) in the file so
    # we can run them via swarming. We use gn_isolate_map.pyl to convert
    # the compile targets to the matching GN labels.
    path = self.args.swarming_targets_file
    if not self.Exists(path):
      self.WriteFailureAndRaise('"%s" does not exist' % path,
    contents = self.ReadFile(path)
    isolate_targets = set(contents.splitlines())

    isolate_map = self.ReadIsolateMap()
    self.RemovePossiblyStaleRuntimeDepsFiles(vals, isolate_targets,
                                             isolate_map, build_dir)

    err, labels = self.MapTargetsToLabels(isolate_map, isolate_targets)

    gn_runtime_deps_path = self.ToAbsPath(build_dir, 'runtime_deps')
    self.WriteFile(gn_runtime_deps_path, '\n'.join(labels) + '\n')
    cmd.append('--runtime-deps-list-file=%s' % gn_runtime_deps_path)

  # Write all generated targets to a JSON file called project.json
  cmd.append('--ide=json')
  cmd.append('--json-file-name=project.json')

  ret, output, _ = self.Run(cmd)
  if self.args.json_output:
    # write errors to json.output
    self.WriteJSON({'output': output}, self.args.json_output)
    # If `gn gen` failed, we should exit early rather than trying to
    # generate isolates. Run() will have already logged any error output.
    self.Print('GN gen failed: %d' % ret)

  if getattr(self.args, 'swarming_targets_file', None):
    ret = self.GenerateIsolates(vals, isolate_targets, isolate_map, build_dir)
1129 def RunGNGenAllIsolates(self, vals):
1131 This command generates all .isolate files.
1133 This command assumes that "mb.py gen" has already been run, as it relies on
1134 "gn ls" to fetch all gn targets. It uses that output, combined with the
1135 isolate_map, to determine all isolates that can be generated for the current
1138 build_dir = self.args.path
1139 ret, output, _ = self.Run(self.GNCmd('ls', build_dir),
1140 force_verbose=False)
1142 # If `gn ls` failed, we should exit early rather than trying to
1143 # generate isolates.
1144 self.Print('GN ls failed: %d' % ret)
1147 # Create a reverse map from isolate label to isolate dict.
1148 isolate_map = self.ReadIsolateMap()
1149 isolate_dict_map = {}
1150 for key, isolate_dict in isolate_map.items():
1151 isolate_dict_map[isolate_dict['label']] = isolate_dict
# Remember the ninja-level key so we can map a GN label back to its
# isolate name below.
1152 isolate_dict_map[isolate_dict['label']]['isolate_key'] = key
1156 isolate_targets = []
1157 # For every GN target, look up the isolate dict.
1158 for line in output.splitlines():
1159 target = line.strip()
1160 if target in isolate_dict_map:
1161 if isolate_dict_map[target]['type'] == 'additional_compile_target':
1162 # By definition, additional_compile_targets are not tests, so we
1163 # shouldn't generate isolates for them.
1166 isolate_targets.append(isolate_dict_map[target]['isolate_key'])
1167 runtime_deps.append(target)
# Clear out any stale runtime_deps files, then re-run `gn gen` with the
# full runtime-deps list so gn emits fresh .runtime_deps files.
1169 self.RemovePossiblyStaleRuntimeDepsFiles(vals, isolate_targets,
1170 isolate_map, build_dir)
1172 gn_runtime_deps_path = self.ToAbsPath(build_dir, 'runtime_deps')
1173 self.WriteFile(gn_runtime_deps_path, '\n'.join(runtime_deps) + '\n')
1174 cmd = self.GNCmd('gen', build_dir)
1175 cmd.append('--runtime-deps-list-file=%s' % gn_runtime_deps_path)
1178 return self.GenerateIsolates(vals, isolate_targets, isolate_map, build_dir)
1180 def RemovePossiblyStaleRuntimeDepsFiles(self, vals, targets, isolate_map,
1182 # TODO(crbug.com/932700): Because `gn gen --runtime-deps-list-file`
1183 # puts the runtime_deps file in different locations based on the actual
1184 # type of a target, we may end up with multiple possible runtime_deps
1185 # files in a given build directory, where some of the entries might be
1186 # stale (since we might be reusing an existing build directory).
1188 # We need to be able to get the right one reliably; you might think
1189 # we can just pick the newest file, but because GN won't update timestamps
1190 # if the contents of the files change, an older runtime_deps
1191 # file might actually be the one we should use over a newer one (see
1192 # crbug.com/932387 for a more complete explanation and example).
1194 # In order to avoid this, we need to delete any possible runtime_deps
1195 # files *prior* to running GN. As long as the files aren't actually
1196 # needed during the build, this hopefully will not cause unnecessary
1197 # build work, and so it should be safe.
1199 # Ultimately, we should just make sure we get the runtime_deps files
1200 # in predictable locations so we don't have this issue at all, and
1201 # that's what crbug.com/932700 is for.
1202 possible_rpaths = self.PossibleRuntimeDepsPaths(vals, targets, isolate_map)
# Delete every candidate location for every target; all paths are
# relative to the build directory.
1203 for rpaths in possible_rpaths.values():
1204 for rpath in rpaths:
1205 path = self.ToAbsPath(build_dir, rpath)
1206 if self.Exists(path):
1207 self.RemoveFile(path)
1209 def _FilterOutUnneededSkylabDeps(self, deps):
1210 """Filter out the runtime dependencies not used by Skylab.
1212 Skylab is CrOS infra facilities for us to run hardware tests. These files
1213 may appear in the test target's runtime_deps for browser lab, but
1214 unnecessary for CrOS lab.
1216 file_ignore_list = [
1217 re.compile(r'.*build/chromeos.*'),
1218 re.compile(r'.*build/cros_cache.*'),
1219 # No test target should rely on files in [output_dir]/gen.
1220 re.compile(r'^gen/.*'),
1222 return [f for f in deps if not any(r.match(f) for r in file_ignore_list)]
1224 def _DedupDependencies(self, deps):
1225 """Remove the deps already contained by other paths."""
# Builds a trie of path segments (nested dicts keyed by segment); a leaf
# is represented by an empty dict {}. A path is dropped when an existing
# shorter path already covers it, and vice versa.
1227 def _add(root, path):
1228 cur = path.popleft()
1229 # Only continue the recursion if the path has child nodes
1230 # AND the current node is not ended by other existing paths.
1231 if path and root.get(cur) != {}:
1232 return _add(root.setdefault(cur, {}), path)
1233 # Cut this path, because child nodes are already included.
# Walk the trie and re-join segments into '/'-separated paths.
1237 def _list(root, prefix, res):
1238 for k, v in root.items():
1240 res.append('%s/%s' % (prefix, k))
1242 _list(v, '%s/%s' % (prefix, k), res)
# Each dep is split on '/' (trailing slash stripped) and fed into the trie.
1247 q = collections.deque(d.rstrip('/').split('/'))
# The leading '/' introduced by the '' prefix is stripped off here.
1249 return [p.lstrip('/') for p in _list(root, '', [])]
1251 def GenerateIsolates(self, vals, ninja_targets, isolate_map, build_dir):
1253 Generates isolates for a list of ninja targets.
1255 Ninja targets are transformed to GN targets via isolate_map.
1257 This function assumes that a previous invocation of "mb.py gen" has
1258 generated runtime deps for all targets.
1260 possible_rpaths = self.PossibleRuntimeDepsPaths(vals, ninja_targets,
1263 for target, rpaths in possible_rpaths.items():
1264 # TODO(crbug.com/932700): We don't know where each .runtime_deps
1265 # file might be, but assuming we called
1266 # RemovePossiblyStaleRuntimeDepsFiles prior to calling `gn gen`,
1267 # there should only be one file.
1271 path = self.ToAbsPath(build_dir, r)
1272 if self.Exists(path):
# Exactly one candidate path may exist; anything else is an error.
1274 raise MBErr('Found more than one of %s' % ', '.join(rpaths))
1279 raise MBErr('Did not find any of %s' % ', '.join(rpaths))
1281 command, extra_files = self.GetSwarmingCommand(target, vals)
1282 runtime_deps = self.ReadFile(path_to_use).splitlines()
1283 runtime_deps = self._DedupDependencies(runtime_deps)
1284 # TODO(crbug.com/1481305): Lacros gtest may need files from folders
1285 # filtered out here. Eventually, we should move the filter to builder
1286 # specific config. Before that, leave the filter only for Ash.
1287 if ('is_skylab=true' in vals['gn_args']
1288 and not 'chromeos_is_browser_only=true' in vals['gn_args']):
1289 runtime_deps = self._FilterOutUnneededSkylabDeps(runtime_deps)
# Isolate file names cannot contain ':' or '/', so flatten the label.
1291 canonical_target = target.replace(':','_').replace('/','_')
1292 ret = self.WriteIsolateFiles(build_dir, command, canonical_target,
1293 runtime_deps, vals, extra_files)
1298 def PossibleRuntimeDepsPaths(self, vals, ninja_targets, isolate_map):
1299 """Returns a map of targets to possible .runtime_deps paths.
1301 Each ninja target maps on to a GN label, but depending on the type
1302 of the GN target, `gn gen --runtime-deps-list-file` will write
1303 the .runtime_deps files into different locations. Unfortunately, in
1304 some cases we don't actually know which of multiple locations will
1305 actually be used, so we return all plausible candidates.
1307 The paths that are returned are relative to the build directory.
# Platform flags are derived purely from the GN args text in vals.
1310 android = 'target_os="android"' in vals['gn_args']
1311 ios = 'target_os="ios"' in vals['gn_args']
1312 fuchsia = 'target_os="fuchsia"' in vals['gn_args']
1313 win = self.platform == 'win32' or 'target_os="win"' in vals['gn_args']
1314 possible_runtime_deps_rpaths = {}
1315 for target in ninja_targets:
1316 target_type = isolate_map[target]['type']
1317 label = isolate_map[target]['label']
# Location gn uses for group/stamp-style targets: next to the stamp
# file under obj/.
1318 stamp_runtime_deps = 'obj/%s.stamp.runtime_deps' % label.replace(':', '/')
1319 # TODO(https://crbug.com/876065): 'official_tests' use
1320 # type='additional_compile_target' to isolate tests. This is not the
1321 # intended use for 'additional_compile_target'.
1322 if (target_type == 'additional_compile_target' and
1323 target != 'official_tests'):
1324 # By definition, additional_compile_targets are not tests, so we
1325 # shouldn't generate isolates for them.
1326 raise MBErr('Cannot generate isolate for %s since it is an '
1327 'additional_compile_target.' % target)
1328 if fuchsia or ios or target_type == 'generated_script':
1329 # iOS and Fuchsia targets end up as groups.
1330 # generated_script targets are always actions.
1331 rpaths = [stamp_runtime_deps]
1333 # Android targets may be either android_apk or executable. The former
1334 # will result in runtime_deps associated with the stamp file, while the
1335 # latter will result in runtime_deps associated with the executable.
1336 label = isolate_map[target]['label']
1338 target + '.runtime_deps',
1340 elif (target_type == 'script'
1341 or isolate_map[target].get('label_type') == 'group'):
1342 # For script targets, the build target is usually a group,
1343 # for which gn generates the runtime_deps next to the stamp file
1344 # for the label, which lives under the obj/ directory, but it may
1345 # also be an executable.
1346 label = isolate_map[target]['label']
1347 rpaths = [stamp_runtime_deps]
1349 rpaths += [ target + '.exe.runtime_deps' ]
1351 rpaths += [ target + '.runtime_deps' ]
1353 rpaths = [target + '.exe.runtime_deps']
1355 rpaths = [target + '.runtime_deps']
1357 possible_runtime_deps_rpaths[target] = rpaths
1359 return possible_runtime_deps_rpaths
1361 def RunGNIsolate(self, vals):
# Generates the .isolate and .isolated.gen.json files for a single target
# by querying `gn desc ... runtime_deps` directly, then verifies the
# result with the luci-go isolate tool.
1362 target = self.args.target
1363 isolate_map = self.ReadIsolateMap()
1364 err, labels = self.MapTargetsToLabels(isolate_map, [target])
1370 build_dir = self.args.path
1372 command, extra_files = self.GetSwarmingCommand(target, vals)
1374 # Any warning for an unused arg will get interleaved into the cmd's
1375 # stdout. When that happens, the isolate step below will fail with an
1376 # obscure error when it tries processing the lines of the warning. Fail
1377 # quickly in that case to avoid confusion
1378 cmd = self.GNCmd('desc', build_dir, label, 'runtime_deps',
1379 '--fail-on-unused-args')
1380 ret, out, _ = self.Call(cmd)
1387 for l in out.splitlines():
1388 # FIXME: Can remove this check if/when use_goma is removed.
1389 if 'The gn arg use_goma=true will be deprecated by EOY 2023' not in l:
1390 runtime_deps.append(l)
1392 ret = self.WriteIsolateFiles(build_dir, command, target, runtime_deps, vals,
# Run the luci-go isolate checker over the generated .isolate file.
1397 ret, _, _ = self.Run([
1398 self.PathJoin(self.chromium_src_dir, 'tools', 'luci-go',
1402 self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
1404 capture_output=False)
1408 def WriteIsolateFiles(self, build_dir, command, target, runtime_deps, vals,
# Writes <target>.isolate (and the .isolated.gen.json driver file) for the
# given command and file list, rejecting generated directories that would
# break incremental builds.
1410 isolate_path = self.ToAbsPath(build_dir, target + '.isolate')
1411 files = sorted(set(runtime_deps + extra_files))
1413 # Complain if any file is a directory that's inside the build directory,
1414 # since that makes incremental builds incorrect. See
1415 # https://crbug.com/912946
1416 is_android = 'target_os="android"' in vals['gn_args']
1417 is_cros = ('target_os="chromeos"' in vals['gn_args']
1418 or 'is_chromeos_device=true' in vals['gn_args'])
1419 is_msan = 'is_msan=true' in vals['gn_args']
1420 is_ios = 'target_os="ios"' in vals['gn_args']
1421 # pylint: disable=consider-using-ternary
1422 is_mac = ((self.platform == 'darwin' and not is_ios)
1423 or 'target_os="mac"' in vals['gn_args'])
1427 # Skip a few configs that need extra cleanup for now.
1428 # TODO(https://crbug.com/912946): Fix everything on all platforms and
1429 # enable check everywhere.
1433 # iOS has generated directories in gn data items.
1434 # Skipping for iOS instead of listing all apps.
1438 # Skip a few existing violations that need to be cleaned up. Each of
1439 # these will lead to incorrect incremental builds if their directory
1440 # contents change. Do not add to this list, except for mac bundles until
1441 # crbug.com/1000667 is fixed.
1442 # TODO(https://crbug.com/912946): Remove this if statement.
1443 if ((is_msan and f == 'instrumented_libraries_prebuilt/')
1444 or f == 'mr_extension/' or # https://crbug.com/997947
1445 f.startswith('nacl_test_data/') or
1446 f.startswith('ppapi_nacl_tests_libs/') or
1447 (is_cros and f in ( # https://crbug.com/1002509
1448 'chromevox_test_data/',
1449 'gen/ui/file_manager/file_manager/',
1450 'resources/chromeos/',
1451 'resources/chromeos/accessibility/accessibility_common/',
1452 'resources/chromeos/accessibility/chromevox/',
1453 'resources/chromeos/accessibility/select_to_speak/',
1454 'test_data/chrome/browser/resources/chromeos/accessibility/'
1455 'accessibility_common/',
1456 'test_data/chrome/browser/resources/chromeos/accessibility/'
1458 'test_data/chrome/browser/resources/chromeos/accessibility/'
1460 )) or (is_mac and f in ( # https://crbug.com/1000667
1461 'Chromium Framework.framework/',
1462 'Chromium Helper.app/',
1464 'ChromiumUpdater.app/',
1465 'ChromiumUpdater_test.app/',
1466 'Content Shell.app/',
1467 'Google Chrome Framework.framework/',
1468 'Google Chrome Helper (Alerts).app/',
1469 'Google Chrome Helper (GPU).app/',
1470 'Google Chrome Helper (Plugin).app/',
1471 'Google Chrome Helper (Renderer).app/',
1472 'Google Chrome Helper.app/',
1473 'Google Chrome.app/',
1474 'GoogleUpdater.app/',
1475 'GoogleUpdater_test.app/',
1476 'UpdaterTestApp Framework.framework/',
1477 'UpdaterTestApp.app/',
1478 'blink_deprecated_test_plugin.plugin/',
1479 'blink_test_plugin.plugin/',
1480 'corb_test_plugin.plugin/',
1481 'obj/tools/grit/brotli_mac_asan_workaround/',
1482 'ppapi_tests.plugin/',
1483 'ui_unittests Framework.framework/',
1487 # This runs before the build, so we can't use isdir(f). But
1488 # isolate.py luckily requires data directories to end with '/', so we
1489 # can check for that.
1490 if not f.startswith('../../') and f.endswith('/'):
1491 # Don't use self.PathJoin() -- all involved paths consistently use
1492 # forward slashes, so don't add one single backslash on Windows.
1493 err += '\n' + build_dir + '/' + f
1496 self.Print('error: gn `data` items may not list generated directories; '
1497 'list files in directory instead for:' + err)
# The isolate file is written as deterministic (sorted-keys) JSON.
1507 self.WriteFile(isolate_path, json.dumps(isolate, sort_keys=True) + '\n')
1513 self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
1515 'dir': self.chromium_src_dir,
1518 isolate_path + 'd.gen.json',
def MapTargetsToLabels(self, isolate_map, targets):
  """Resolve ninja target names to GN labels via the isolate map.

  Returns an (err, labels) pair: `err` accumulates human-readable error
  text for unknown or unmapped targets (empty string when all resolve),
  and `labels` holds the resolved GN labels in input order.
  """
  labels = []
  err = ''
  for target in targets:
    if target == 'all' or target.startswith('//'):
      # 'all' and explicit GN labels pass through untouched.
      labels.append(target)
    elif target in isolate_map:
      entry = isolate_map[target]
      if entry['type'] == 'unknown':
        err += ('test target "%s" type is unknown\n' % target)
      else:
        labels.append(entry['label'])
    else:
      err += ('target "%s" not found in '
              '//testing/buildbot/gn_isolate_map.pyl\n' % target)
  return err, labels
def GNCmd(self, subcommand, path, *args):
  """Build the full `gn` command line for `subcommand` against `path`.

  Picks the bundled gn binary for the host platform and threads through
  any --root/--dotfile overrides supplied on the MB command line.
  """
  if self.platform.startswith('linux'):
    subdir, exe = 'linux64', 'gn'
  elif self.platform == 'darwin':
    subdir, exe = 'mac', 'gn'
  elif self.platform == 'aix6':
    subdir, exe = 'aix', 'gn'
  else:
    subdir, exe = 'win', 'gn.exe'

  gn_path = self.PathJoin(self.chromium_src_dir, 'buildtools', subdir, exe)
  cmd = [gn_path, subcommand]
  if self.args.root:
    cmd += ['--root=' + self.args.root]
  if self.args.dotfile:
    cmd += ['--dotfile=' + self.args.dotfile]
  return cmd + [path] + list(args)
1562 def GNArgs(self, vals, expand_imports=False):
1563 """Returns the gn args from vals as a Python dict and a text string.
1565 If expand_imports is true, any import() lines will be read in and
1566 values in them will be included."""
1567 gn_args = vals['gn_args']
# Command-line overrides are appended after the config's args so they win.
1569 if self.args.goma_dir:
1570 gn_args += ' goma_dir="%s"' % self.args.goma_dir
1572 android_version_code = self.args.android_version_code
1573 if android_version_code:
1574 gn_args += ' android_default_version_code="%s"' % android_version_code
1576 android_version_name = self.args.android_version_name
1577 if android_version_name:
1578 gn_args += ' android_default_version_name="%s"' % android_version_name
1583 args_file = vals.get('args_file', None)
# When expanding, the referenced file's args are parsed and merged in;
# otherwise the args.gn just carries an import() line.
1586 content = self.ReadFile(self.ToAbsPath(args_file))
1587 parsed_gn_args = gn_helpers.FromGNArgs(content)
1589 args_gn_lines.append('import("%s")' % args_file)
1591 # Canonicalize the arg string into a sorted, newline-separated list
1592 # of key-value pairs, and de-dup the keys if need be so that only
1593 # the last instance of each arg is listed.
1594 parsed_gn_args.update(gn_helpers.FromGNArgs(gn_args))
1595 args_gn_lines.append(gn_helpers.ToGNString(parsed_gn_args))
1597 return parsed_gn_args, '\n'.join(args_gn_lines)
1599 def GetSwarmingCommand(self, target, vals):
# Builds the (cmdline, extra_files) pair used to run `target` under
# swarming, dispatching on the target's isolate-map type and the
# platform/sanitizer flags found in vals['gn_args'].
1600 isolate_map = self.ReadIsolateMap()
1602 is_android = 'target_os="android"' in vals['gn_args']
1603 is_fuchsia = 'target_os="fuchsia"' in vals['gn_args']
1604 is_cros = ('target_os="chromeos"' in vals['gn_args']
1605 or 'is_chromeos_device=true' in vals['gn_args'])
1606 is_cros_device = 'is_chromeos_device=true' in vals['gn_args']
1607 is_ios = 'target_os="ios"' in vals['gn_args']
1608 # pylint: disable=consider-using-ternary
1609 is_mac = ((self.platform == 'darwin' and not is_ios)
1610 or 'target_os="mac"' in vals['gn_args'])
1611 is_win = self.platform == 'win32' or 'target_os="win"' in vals['gn_args']
1612 is_lacros = 'chromeos_is_browser_only=true' in vals['gn_args']
1614 # This should be true if tests with type='windowed_test_launcher' are
1615 # expected to run using xvfb. For example, Linux Desktop, X11 CrOS and
1616 # Ozone CrOS builds on Linux (xvfb is not used on CrOS HW or VMs). Note
1617 # that one Ozone build can be used to run different backends. Currently,
1618 # tests are executed for the headless and X11 backends and both can run
1619 # under Xvfb on Linux.
1620 use_xvfb = (self.platform.startswith('linux') and not is_android
1621 and not is_fuchsia and not is_cros_device)
1623 asan = 'is_asan=true' in vals['gn_args']
1624 lsan = 'is_lsan=true' in vals['gn_args']
1625 msan = 'is_msan=true' in vals['gn_args']
1626 tsan = 'is_tsan=true' in vals['gn_args']
1627 cfi_diag = 'use_cfi_diag=true' in vals['gn_args']
1628 # Treat sanitizer warnings as test case failures (crbug/1442587).
1629 fail_on_san_warnings = 'fail_on_san_warnings=true' in vals['gn_args']
1630 clang_coverage = 'use_clang_coverage=true' in vals['gn_args']
1631 java_coverage = 'use_jacoco_coverage=true' in vals['gn_args']
1632 javascript_coverage = 'use_javascript_coverage=true' in vals['gn_args']
1634 test_type = isolate_map[target]['type']
# Optionally wrap the whole command in `luci-auth context` so tests get
# ambient LUCI credentials.
1636 if self.use_luci_auth:
1637 cmdline = ['luci-auth.exe' if is_win else 'luci-auth', 'context', '--']
1641 if getattr(self.args, 'bot_mode', True):
1642 bot_mode = ('--test-launcher-bot-mode', )
1646 if test_type == 'generated_script' or is_ios or is_lacros:
1647 assert 'script' not in isolate_map[target], (
1648 'generated_scripts can no longer customize the script path')
1650 default_script = 'bin\\run_{}.bat'.format(target)
1652 default_script = 'bin/run_{}'.format(target)
1653 script = isolate_map[target].get('script', default_script)
1655 # TODO(crbug.com/816629): remove any use of 'args' from
1656 # generated_scripts.
1657 cmdline += [script] + isolate_map[target].get('args', [])
1660 cmdline += ['--coverage-dir', '${ISOLATED_OUTDIR}/coverage']
1665 # TODO(crbug.com/816629): Convert all targets to generated_scripts
1666 # and delete the rest of this function.
1667 executable = isolate_map[target].get('executable', target)
1668 executable_suffix = isolate_map[target].get(
1669 'executable_suffix', '.exe' if is_win else '')
1671 vpython_exe = 'vpython3'
1674 '../../testing/test_env.py',
1677 if is_android and test_type != 'script':
1679 cmdline += [os.path.join('bin', 'run_with_asan'), '--']
1681 vpython_exe, '../../build/android/test_wrapper/logdog_wrapper.py',
1682 '--target', target, '--logdog-bin-cmd',
1683 '../../.task_template_packages/logdog_butler'
1685 if test_type != 'junit_test':
1686 cmdline += ['--store-tombstones']
1687 if clang_coverage or java_coverage:
1688 cmdline += ['--coverage-dir', '${ISOLATED_OUTDIR}']
1689 elif is_fuchsia and test_type != 'script':
1690 # On Fuchsia, the generated bin/run_* test scripts are used both in
1691 # infrastructure and by developers. test_env.py is intended to establish a
1692 # predictable environment for automated testing. In particular, it adds
1693 # CHROME_HEADLESS=1 to the environment for child processes. This variable
1694 # is a signal to both test and production code that it is running in the
1695 # context of an automated testing environment, and should not be present
1696 # for normal developer workflows.
1699 '../../testing/test_env.py',
1700 os.path.join('bin', 'run_%s' % target),
1702 '--logs-dir=${ISOLATED_OUTDIR}',
1704 elif is_cros_device and test_type != 'script':
1706 os.path.join('bin', 'run_%s' % target),
1707 '--logs-dir=${ISOLATED_OUTDIR}',
1709 elif use_xvfb and test_type == 'windowed_test_launcher':
1710 extra_files.append('../../testing/xvfb.py')
1713 '../../testing/xvfb.py',
1714 './' + str(executable) + executable_suffix,
1717 '--lsan=%d' % asan, # Enable lsan when asan is enabled.
1720 '--cfi-diag=%d' % cfi_diag,
1723 if fail_on_san_warnings:
1724 cmdline += ['--fail-san=1']
1726 if javascript_coverage:
1727 cmdline += ['--devtools-code-coverage=${ISOLATED_OUTDIR}']
1728 elif test_type in ('windowed_test_launcher', 'console_test_launcher'):
1731 '../../testing/test_env.py',
1732 './' + str(executable) + executable_suffix,
1735 # Enable lsan when asan is enabled except on Windows where LSAN isn't
1737 # TODO(https://crbug.com/1320449): Enable on Mac inside asan once
1739 # TODO(https://crbug.com/974478): Enable on ChromeOS once things pass.
1741 or (asan and not is_mac and not is_win and not is_cros),
1744 '--cfi-diag=%d' % cfi_diag,
1747 if fail_on_san_warnings:
1748 cmdline += ['--fail-san=1']
1749 elif test_type == 'script':
1750 # If we're testing a CrOS simplechrome build, assume we need to prepare a
1751 # DUT for testing. So prepend the command to run with the test wrapper.
1754 os.path.join('bin', 'cros_test_wrapper'),
1755 '--logs-dir=${ISOLATED_OUTDIR}',
1758 extra_files.append('../../build/android/test_wrapper/logdog_wrapper.py')
1761 '../../testing/test_env.py',
1762 '../../build/android/test_wrapper/logdog_wrapper.py',
1764 '../../' + self.ToSrcRelPath(isolate_map[target]['script']),
1766 '../../.task_template_packages/logdog_butler',
1770 vpython_exe, '../../testing/test_env.py',
1771 '../../' + self.ToSrcRelPath(isolate_map[target]['script'])
1773 elif test_type == 'additional_compile_target':
1775 './' + str(target) + executable_suffix,
1778 self.WriteFailureAndRaise('No command line for %s found (test type %s).'
1779 % (target, test_type), output_path=None)
# Per-target extra args from the isolate map always go last.
1781 cmdline += isolate_map[target].get('args', [])
1783 return cmdline, extra_files
def ToAbsPath(self, build_path, *comps):
  """Convert a //-style build path (plus optional components) to absolute."""
  rel = self.ToSrcRelPath(build_path)
  return self.PathJoin(self.chromium_src_dir, rel, *comps)
def ToSrcRelPath(self, path):
  """Returns a relative path from the top of the repo."""
  if not path.startswith('//'):
    # Already a filesystem path; compute it relative to the src checkout.
    return self.RelPath(path, self.chromium_src_dir)
  # GN-style //-paths are rooted at src; swap in the OS path separator.
  return path[2:].replace('/', self.sep)
1796 def RunGNAnalyze(self, vals):
1797 # Analyze runs before 'gn gen' now, so we need to run gn gen
1798 # in order to ensure that we have a build directory.
1799 ret = self.RunGNGen(vals, compute_inputs_for_analyze=True, check=False)
1803 build_path = self.args.path
1804 input_path = self.args.input_path
# The .gn-suffixed files hold the GN-label form of the analyze
# input/output exchanged with `gn analyze`.
1805 gn_input_path = input_path + '.gn'
1806 output_path = self.args.output_path
1807 gn_output_path = output_path + '.gn'
1809 inp = self.ReadInputJSON(['files', 'test_targets',
1810 'additional_compile_targets'])
1811 if self.args.verbose:
1813 self.Print('analyze input:')
1818 # This shouldn't normally happen, but could due to unusual race conditions,
1819 # like a try job that gets scheduled before a patch lands but runs after
1820 # the patch has landed.
1821 if not inp['files']:
1822 self.Print('Warning: No files modified in patch, bailing out early.')
1824 'status': 'No dependency',
1825 'compile_targets': [],
# gn wants the file paths to be absolute //-labels.
1831 gn_inp['files'] = ['//' + f for f in inp['files'] if not f.startswith('//')]
1833 isolate_map = self.ReadIsolateMap()
1834 err, gn_inp['additional_compile_targets'] = self.MapTargetsToLabels(
1835 isolate_map, inp['additional_compile_targets'])
1839 err, gn_inp['test_targets'] = self.MapTargetsToLabels(
1840 isolate_map, inp['test_targets'])
# Keep a label->original-name map so gn's answers can be translated back.
1843 labels_to_targets = {}
1844 for i, label in enumerate(gn_inp['test_targets']):
1845 labels_to_targets[label] = inp['test_targets'][i]
1848 self.WriteJSON(gn_inp, gn_input_path)
1849 cmd = self.GNCmd('analyze', build_path, gn_input_path, gn_output_path)
1850 ret, output, _ = self.Run(cmd, force_verbose=True)
1852 if self.args.json_output:
1853 # write errors to json.output
1854 self.WriteJSON({'output': output}, self.args.json_output)
1857 gn_outp_str = self.ReadFile(gn_output_path)
1859 gn_outp = json.loads(gn_outp_str)
1860 except Exception as e:
1861 self.Print("Failed to parse the JSON string GN returned: %s\n%s"
1862 % (repr(gn_outp_str), str(e)))
# Translate gn's analyze output into MB's output schema.
1866 if 'status' in gn_outp:
1867 outp['status'] = gn_outp['status']
1868 if 'error' in gn_outp:
1869 outp['error'] = gn_outp['error']
1870 if 'invalid_targets' in gn_outp:
1871 outp['invalid_targets'] = gn_outp['invalid_targets']
1872 if 'compile_targets' in gn_outp:
1873 all_input_compile_targets = sorted(
1874 set(inp['test_targets'] + inp['additional_compile_targets']))
1876 # If we're building 'all', we can throw away the rest of the targets
1877 # since they're redundant.
1878 if 'all' in gn_outp['compile_targets']:
1879 outp['compile_targets'] = ['all']
1881 outp['compile_targets'] = gn_outp['compile_targets']
1883 # crbug.com/736215: When GN returns targets back, for targets in
1884 # the default toolchain, GN will have generated a phony ninja
1885 # target matching the label, and so we can safely (and easily)
1886 # transform any GN label into the matching ninja target. For
1887 # targets in other toolchains, though, GN doesn't generate the
1888 # phony targets, and we don't know how to turn the labels into
1889 # compile targets. In this case, we also conservatively give up
1890 # and build everything. Probably the right thing to do here is
1891 # to have GN return the compile targets directly.
1892 if any("(" in target for target in outp['compile_targets']):
1893 self.Print('WARNING: targets with non-default toolchains were '
1894 'found, building everything instead.')
1895 outp['compile_targets'] = all_input_compile_targets
1897 outp['compile_targets'] = [
1898 label.replace('//', '') for label in outp['compile_targets']]
1900 # Windows has a maximum command line length of 8k; even Linux
1901 # maxes out at 128k; if analyze returns a *really long* list of
1902 # targets, we just give up and conservatively build everything instead.
1903 # Probably the right thing here is for ninja to support response
1904 # files as input on the command line
1905 # (see https://github.com/ninja-build/ninja/issues/1355).
1906 # Android targets use a lot of templates and often exceed 7kb.
1907 # https://crbug.com/946266
1908 max_cmd_length_kb = 64 if platform.system() == 'Linux' else 7
1910 if len(' '.join(outp['compile_targets'])) > max_cmd_length_kb * 1024:
1911 self.Print('WARNING: Too many compile targets were affected.')
1912 self.Print('WARNING: Building everything instead to avoid '
1913 'command-line length issues.')
1914 outp['compile_targets'] = all_input_compile_targets
1917 if 'test_targets' in gn_outp:
1918 outp['test_targets'] = [
1919 labels_to_targets[label] for label in gn_outp['test_targets']]
1921 if self.args.verbose:
1923 self.Print('analyze output:')
1924 self.PrintJSON(outp)
1927 self.WriteJSON(outp, output_path)
# Clean up the intermediate .gn exchange files.
1930 if self.Exists(gn_input_path):
1931 self.RemoveFile(gn_input_path)
1932 if self.Exists(gn_output_path):
1933 self.RemoveFile(gn_output_path)
1937 def ReadInputJSON(self, required_keys):
# Loads the analyze-step input JSON from --input-path and validates that
# every key in required_keys is present; any failure is recorded via
# WriteFailureAndRaise (which writes the error to --output-path).
1938 path = self.args.input_path
1939 output_path = self.args.output_path
1940 if not self.Exists(path):
1941 self.WriteFailureAndRaise('"%s" does not exist' % path, output_path)
1944 inp = json.loads(self.ReadFile(path))
1945 except Exception as e:
1946 self.WriteFailureAndRaise('Failed to read JSON input from "%s": %s' %
1947 (path, e), output_path)
1949 for k in required_keys:
1951 self.WriteFailureAndRaise('input file is missing a "%s" key' % k,
1956 def WriteFailureAndRaise(self, msg, output_path):
# Records msg as a JSON error payload at output_path.
# NOTE(review): per its name this is expected to also raise (presumably
# MBErr(msg)) after recording; the raise sits outside this excerpt --
# confirm before relying on control flow continuing past a call site.
1958 self.WriteJSON({'error': msg}, output_path, force_verbose=True)
def WriteJSON(self, obj, path, force_verbose=False):
  """Serialize `obj` as pretty-printed, key-sorted JSON into `path`.

  Any failure is re-raised as MBErr, chained to the original exception.
  """
  try:
    contents = json.dumps(obj, indent=2, sort_keys=True) + '\n'
    self.WriteFile(path, contents, force_verbose=force_verbose)
  except Exception as e:
    raise MBErr('Error %s writing to the output path "%s"' % (e, path)) from e
def PrintCmd(self, cmd):
  """Echo `cmd` as a single copy-pasteable shell command line."""
  if self.platform == 'win32':
    quoter = QuoteForCmd
  else:
    quoter = shlex.quote
  if cmd[0] == self.executable:
    # Show a generic interpreter name rather than the absolute path.
    cmd = ['python'] + cmd[1:]
  self.Print(*[quoter(arg) for arg in cmd])
def PrintJSON(self, obj):
  """Print `obj` as stable (key-sorted), pretty-printed JSON."""
  text = json.dumps(obj, indent=2, sort_keys=True)
  self.Print(text)
def Build(self, target):
  """Run autoninja to build `target` in the configured build directory.

  Honors --jobs when given; returns autoninja's exit code.
  """
  build_dir = self.ToSrcRelPath(self.args.path)
  if self.platform == 'win32':
    # On Windows use the batch script since there is no exe.
    ninja_cmd = ['autoninja.bat', '-C', build_dir]
  else:
    ninja_cmd = ['autoninja', '-C', build_dir]
  if self.args.jobs:
    ninja_cmd.extend(['-j', '%d' % self.args.jobs])
  ninja_cmd.append(target)
  ret, _, _ = self.Run(ninja_cmd, capture_output=False)
  return ret
1994 def Run(self, cmd, env=None, force_verbose=True, capture_output=True):
1995 # This function largely exists so it can be overridden for testing.
# Echoes the command when verbose/dryrun, skips execution on --dryrun,
# otherwise delegates to Call() and returns its (ret, out, err) triple.
1996 if self.args.dryrun or self.args.verbose or force_verbose:
1998 if self.args.dryrun:
2001 ret, out, err = self.Call(cmd, env=env, capture_output=capture_output)
2002 if self.args.verbose or force_verbose:
2004 self.Print(' -> returned %d' % ret)
2006 # This is the error seen on the logs
2007 self.Print(out, end='')
2009 self.Print(err, end='', file=sys.stderr)
2010 return ret, out, err
2012 # Call has argument input to match subprocess.run
2017 capture_output=True,
2019 ): # pylint: disable=redefined-builtin
2020 # We are returning the exit code, we don't want an exception thrown
2021 # for non-zero exit code
2022 # pylint: disable=subprocess-run-check
# Runs cmd via subprocess.run from the src checkout root and returns the
# (returncode, stdout, stderr) triple without raising on failure.
2023 p = subprocess.run(cmd,
2025 capture_output=capture_output,
2026 cwd=self.chromium_src_dir,
2030 return p.returncode, p.stdout, p.stderr
def Exists(self, path):
  """Return whether `path` exists on disk.

  This function largely exists so it can be overridden for testing.
  """
  return os.path.exists(path)
2036 def Fetch(self, url):
2037 # This function largely exists so it can be overridden for testing.
2038 f = urllib.request.urlopen(url)
2043 def ListDir(self, path):
2044 # This function largely exists so it can be overridden for testing.
2045 return os.listdir(path)
2047 def MaybeMakeDirectory(self, path):
2050 except OSError as e:
2051 if e.errno != errno.EEXIST:
2054 def PathJoin(self, *comps):
2055 # This function largely exists so it can be overriden for testing.
2056 return os.path.join(*comps)
2058 def Print(self, *args, **kwargs):
2059 # This function largely exists so it can be overridden for testing.
2060 print(*args, **kwargs)
2061 if kwargs.get('stream', sys.stdout) == sys.stdout:
2064 def ReadFile(self, path):
2065 # This function largely exists so it can be overriden for testing.
2066 with open(path) as fp:
2069 def RelPath(self, path, start='.'):
2070 # This function largely exists so it can be overriden for testing.
2071 return os.path.relpath(path, start)
2073 def RemoveFile(self, path):
2074 # This function largely exists so it can be overriden for testing.
2077 def RemoveDirectory(self, abs_path):
2078 if self.platform == 'win32':
2079 # In other places in chromium, we often have to retry this command
2080 # because we're worried about other processes still holding on to
2081 # file handles, but when MB is invoked, it will be early enough in the
2082 # build that their should be no other processes to interfere. We
2083 # can change this if need be.
2084 self.Run(['cmd.exe', '/c', 'rmdir', '/q', '/s', abs_path])
2086 shutil.rmtree(abs_path, ignore_errors=True)
2089 # This function largely exists so it can be overriden for testing.
2090 return tempfile.mkdtemp(prefix='mb_')
2092 def TempFile(self, mode='w'):
2093 # This function largely exists so it can be overriden for testing.
2094 return tempfile.NamedTemporaryFile(mode=mode, delete=False)
2096 def WriteFile(self, path, contents, force_verbose=False):
2097 # This function largely exists so it can be overriden for testing.
2098 if self.args.dryrun or self.args.verbose or force_verbose:
2099 self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path))
2100 with open(path, 'w', encoding='utf-8', newline='') as fp:
2101 return fp.write(contents)
def FlattenConfig(config_pool, mixin_pool, config):
  """Resolve *config* to a flat vals dict by applying its mixins in order.

  Raises MBErr (via FlattenMixins) if a referenced mixin is unknown.
  """
  mixins = config_pool[config]
  vals = DefaultVals()

  visited = []
  FlattenMixins(mixin_pool, mixins, vals, visited)
  return vals
def FlattenMixins(mixin_pool, mixins_to_flatten, vals, visited):
  """Recursively fold each named mixin's values into *vals*.

  *visited* accumulates mixin names in application order.  args_file may
  only be set once across all mixins; gn_args strings are concatenated.
  Returns *vals* (also mutated in place).
  """
  for m in mixins_to_flatten:
    if m not in mixin_pool:
      raise MBErr('Unknown mixin "%s"' % m)

    visited.append(m)

    mixin_vals = mixin_pool[m]

    if 'args_file' in mixin_vals:
      if vals['args_file']:
        raise MBErr('args_file specified multiple times in mixins '
                    'for mixin %s' % m)
      vals['args_file'] = mixin_vals['args_file']
    if 'gn_args' in mixin_vals:
      if vals['gn_args']:
        vals['gn_args'] += ' ' + mixin_vals['gn_args']
      else:
        vals['gn_args'] = mixin_vals['gn_args']

    if 'mixins' in mixin_vals:
      FlattenMixins(mixin_pool, mixin_vals['mixins'], vals, visited)
  return vals
2139 class MBErr(Exception):
# See http://goo.gl/l5NPDW and http://goo.gl/4Diozm for the painful
# details of this next section, which handles escaping command lines
# so that they can be copied and pasted into a cmd window.

# Characters cmd.exe treats specially inside a `set` statement.
UNSAFE_FOR_SET = set('^<>&|')
# The set-unsafe characters plus parens and percent, special to cmd itself.
UNSAFE_FOR_CMD = UNSAFE_FOR_SET.union(set('()%'))
# Every cmd metacharacter, including the double quote.
ALL_META_CHARS = UNSAFE_FOR_CMD.union(set('"'))
def QuoteForSet(arg):
  """Caret-escape *arg* so it survives a cmd.exe `set` statement."""
  if any(a in UNSAFE_FOR_SET for a in arg):
    arg = ''.join('^' + a if a in UNSAFE_FOR_SET else a for a in arg)
  return arg
def QuoteForCmd(arg):
  """Quote *arg* so cmd.exe and CommandLineToArgvW both parse it correctly."""
  # First, escape the arg so that CommandLineToArgvW will parse it properly.
  if arg == '' or ' ' in arg or '"' in arg:
    quote_re = re.compile(r'(\\*)"')
    arg = '"%s"' % (quote_re.sub(lambda mo: 2 * mo.group(1) + '\\"', arg))

  # Then check to see if the arg contains any metacharacters other than
  # double quotes; if it does, quote everything (including the double
  # quotes) for safety.
  if any(a in UNSAFE_FOR_CMD for a in arg):
    arg = ''.join('^' + a if a in ALL_META_CHARS else a for a in arg)
  return arg
# Script entry point: forward the CLI args (minus argv[0]) to main() and
# propagate its return value as the process exit code.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))