1 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Module containing the various individual commands a builder can run."""
7 from __future__ import print_function
14 import multiprocessing
20 from chromite.cbuildbot import cbuildbot_config
21 from chromite.cbuildbot import failures_lib
22 from chromite.cbuildbot import constants
23 from chromite.cros.tests import cros_vm_test
24 from chromite.lib import cros_build_lib
25 from chromite.lib import git
26 from chromite.lib import gob_util
27 from chromite.lib import gs
28 from chromite.lib import locking
29 from chromite.lib import osutils
30 from chromite.lib import parallel
31 from chromite.lib import retry_util
32 from chromite.lib import timeout_util
33 from chromite.scripts import pushimage
34 from chromite.scripts import upload_symbols
# Path templates and filenames shared by the build/test helpers below.
37 _PACKAGE_FILE = '%(buildroot)s/src/scripts/cbuildbot_package.list'
38 CHROME_KEYWORDS_FILE = ('/build/%(board)s/etc/portage/package.keywords/chrome')
39 _CROS_ARCHIVE_URL = 'CROS_ARCHIVE_URL'
40 _FACTORY_SHIM = 'factory_shim'
# RPC client script and host used to schedule/abort jobs in the Autotest lab
# (see RunHWTestSuite and AbortCQHWTests below).
41 _AUTOTEST_RPC_CLIENT = ('/b/build_internal/scripts/slave-internal/autotest_rpc/'
42 'autotest_rpc_client.py')
43 _AUTOTEST_RPC_HOSTNAME = 'master2'
# Flags passed to setup_board/build_packages when building from local sources
# rather than prebuilt binary packages (see SetupBoard and Build).
44 _LOCAL_BUILD_FLAGS = ['--nousepkg', '--reuse_pkgs_from_local_boards']
45 UPLOADED_LIST_FILENAME = 'UPLOADED'
46 STATEFUL_FILE = 'stateful.tgz'
47 # For sorting through VM test results.
48 _TEST_REPORT_FILENAME = 'test_report.log'
49 _TEST_PASSED = 'PASSED'
50 _TEST_FAILED = 'FAILED'
class TestFailure(failures_lib.StepFailure):
  """Raised when a test stage (e.g. VMTest) fails outright."""
class TestWarning(failures_lib.StepFailure):
  """Raised when a test stage (e.g. VMTest) exits with a warning code."""
class SuiteTimedOut(failures_lib.TestLabFailure):
  """Raised when a test suite times out without reporting test failures."""
class BoardNotAvailable(failures_lib.TestLabFailure):
  """Raised when the requested board is not available in the lab."""
66 # =========================== Command Helpers =================================
# NOTE(review): this listing elides original lines 95-98, 101, 103, 107, 115,
# 117, 122, 127, 129 and 131, so the chromite_cmd branch, the try statement,
# and several conditionals are not visible here; comments describe only what
# is shown. Do not assume the elided control flow -- confirm against the full
# source.
69 def RunBuildScript(buildroot, cmd, chromite_cmd=False, **kwargs):
70 """Run a build script, wrapping exceptions as needed.
72 This wraps RunCommand(cmd, cwd=buildroot, **kwargs), adding extra logic to
73 help determine the cause of command failures.
74 - If a package fails to build, a PackageBuildFailure exception is thrown,
75 which lists exactly which packages failed to build.
76 - If the command fails for a different reason, a BuildScriptFailure
79 We detect what packages failed to build by creating a temporary status file,
80 and passing that status file to parallel_emerge via the
81 PARALLEL_EMERGE_STATUS_FILE variable.
84 buildroot: The root of the build directory.
85 cmd: The command to run.
86 chromite_cmd: Whether the command should be evaluated relative to the
87 chromite/bin subdir of the |buildroot|.
88 kwargs: Optional args passed to RunCommand; see RunCommand for specifics.
89 In addition, if 'sudo' kwarg is True, SudoRunCommand will be used.
91 assert not kwargs.get('shell', False), 'Cannot execute shell commands'
92 kwargs.setdefault('cwd', buildroot)
93 enter_chroot = kwargs.get('enter_chroot', False)
94 sudo = kwargs.pop('sudo', False)
# Rewrite cmd[0] to an absolute path under chromite/bin; the chroot variant
# additionally translates the path to its in-chroot equivalent. The guards
# selecting between these two lines are elided from this listing.
99 cmd[0] = git.ReinterpretPathForChroot(
100 os.path.join(buildroot, constants.CHROMITE_BIN_SUBDIR, cmd[0]))
102 cmd[0] = os.path.join(buildroot, constants.CHROMITE_BIN_SUBDIR, cmd[0])
104 # If we are entering the chroot, create status file for tracking what
105 # packages failed to build.
106 chroot_tmp = os.path.join(buildroot, 'chroot', 'tmp')
108 with cros_build_lib.ContextManagerStack() as stack:
109 if enter_chroot and os.path.exists(chroot_tmp):
110 kwargs['extra_env'] = (kwargs.get('extra_env') or {}).copy()
111 status_file = stack.Add(tempfile.NamedTemporaryFile, dir=chroot_tmp)
112 kwargs['extra_env']['PARALLEL_EMERGE_STATUS_FILE'] = \
113 git.ReinterpretPathForChroot(status_file.name)
# Pick the runner based on the |sudo| kwarg popped above (the if/else lines
# are elided from this listing).
114 runcmd = cros_build_lib.RunCommand
116 runcmd = cros_build_lib.SudoRunCommand
118 return runcmd(cmd, **kwargs)
119 except cros_build_lib.RunCommandError as ex:
120 # Print the original exception.
121 cros_build_lib.Error('\n%s', ex)
123 # Check whether a specific package failed. If so, wrap the exception
124 # appropriately. These failures are usually caused by a recent CL, so we
125 # don't ever treat these failures as flaky.
126 if status_file is not None:
128 failed_packages = status_file.read().split()
130 raise failures_lib.PackageBuildFailure(ex, cmd[0], failed_packages)
132 # Looks like a generic failure. Raise a BuildScriptFailure.
133 raise failures_lib.BuildScriptFailure(ex, cmd[0])
def GetInput(prompt):
  """Read one line of input from the user.

  Kept as a thin indirection over raw_input so unit tests can stub it out.
  """
  response = raw_input(prompt)
  return response
# NOTE(review): original lines 153-154 are elided here -- presumably the
# condition guarding the Die() on line 155 (e.g. clobbering '/' or a system
# path). Confirm against the full source.
141 def ValidateClobber(buildroot):
142 """Do due diligence if user wants to clobber buildroot.
145 buildroot: buildroot that's potentially clobbered.
148 True if the clobber is ok.
# Refuse to delete the checkout this very module lives in.
150 cwd = os.path.dirname(os.path.realpath(__file__))
151 if cwd.startswith(buildroot):
152 cros_build_lib.Die('You are trying to clobber this chromite checkout!')
155 cros_build_lib.Die('Refusing to clobber your system!')
# Only prompt when there is actually something to delete; default answer
# is "no" so an accidental Enter does not clobber.
157 if os.path.exists(buildroot):
158 return cros_build_lib.BooleanPrompt(default=False)
162 # =========================== Main Commands ===================================
# NOTE(review): this listing elides original lines 182-183, 188, 191, 194 and
# 197, so the try statement around the git calls and the binding of |result|
# (used on lines 190/195, likely `result = e.result` from the except on line
# 187) are not visible. Confirm against the full source.
165 def BuildRootGitCleanup(buildroot):
166 """Put buildroot onto manifest branch. Delete branches created on last run.
169 buildroot: buildroot to clean up.
171 lock_path = os.path.join(buildroot, '.clean_lock')
# Set by any worker that deletes an object dir; triggers the second pass.
172 deleted_objdirs = multiprocessing.Event()
174 def RunCleanupCommands(project, cwd):
175 with locking.FileLock(lock_path, verbose=False).read_lock() as lock:
176 # Calculate where the git repository is stored.
177 relpath = os.path.relpath(cwd, buildroot)
178 projects_dir = os.path.join(buildroot, '.repo', 'projects')
179 project_objects_dir = os.path.join(buildroot, '.repo', 'project-objects')
180 repo_git_store = '%s.git' % os.path.join(projects_dir, relpath)
181 repo_obj_store = '%s.git' % os.path.join(project_objects_dir, project)
184 if os.path.isdir(cwd):
185 git.CleanAndDetachHead(cwd)
186 git.GarbageCollection(cwd)
187 except cros_build_lib.RunCommandError as e:
189 cros_build_lib.PrintBuildbotStepWarnings()
190 logging.warn('\n%s', result.error)
192 # If there's no repository corruption, just delete the index.
193 corrupted = git.IsGitRepositoryCorrupted(cwd)
195 logging.warn('Deleting %s because %s failed', cwd, result.cmd)
196 osutils.RmDir(cwd, ignore_missing=True)
198 # Looks like the object dir is corrupted. Delete the whole repository.
199 deleted_objdirs.set()
200 for store in (repo_git_store, repo_obj_store):
201 logging.warn('Deleting %s as well', store)
202 osutils.RmDir(store, ignore_missing=True)
204 # Delete all branches created by cbuildbot.
205 if os.path.isdir(repo_git_store):
206 cmd = ['branch', '-D'] + list(constants.CREATED_BRANCHES)
207 git.RunGit(repo_git_store, cmd, error_code_ok=True)
209 # Cleanup all of the directories.
210 dirs = [[attrs['name'], os.path.join(buildroot, attrs['path'])] for attrs in
211 git.ManifestCheckout.Cached(buildroot).ListCheckouts()]
212 parallel.RunTasksInProcessPool(RunCleanupCommands, dirs)
214 # repo shares git object directories amongst multiple project paths. If the
215 # first pass deleted an object dir for a project path, then other repositories
216 # (project paths) of that same project may now be broken. Do a second pass to
217 # clean them up as well.
218 if deleted_objdirs.is_set():
219 parallel.RunTasksInProcessPool(RunCleanupCommands, dirs)
def CleanUpMountPoints(buildroot):
  """Cleans up any stale mount points from previous runs."""
  # Scrape mounts from /proc/mounts (via osutils) since it is easily
  # accessible, and unmount in reverse of the order listed there rather than
  # reverse-sorting paths: nested loop mounts such as
  #   mount /foon/blah -o loop /a
  # are handled correctly by mount order but not by a path sort.
  root_prefix = os.path.realpath(buildroot).rstrip('/') + '/'
  stale_mounts = [entry.destination for entry in osutils.IterateMountPoints()
                  if entry.destination.startswith(root_prefix)]
  for mount_pt in reversed(stale_mounts):
    osutils.UmountDir(mount_pt, lazy=True, cleanup=False)
def WipeOldOutput(buildroot):
  """Wipes out the image output directory from a previous build.

  Args:
    buildroot: Root directory where build occurs.
  """
  image_dir = os.path.join(buildroot, 'src', 'build', 'images')
  # Images are often created as root inside the chroot, so sudo is required.
  osutils.RmDir(image_dir, ignore_missing=True, sudo=True)
# NOTE(review): original lines 253-254 and 256-257 are elided -- presumably
# `if replace:` guarding line 255 and `if chrome_root:` guarding line 258.
# Confirm against the full source.
249 def MakeChroot(buildroot, replace, use_sdk, chrome_root=None, extra_env=None):
250 """Wrapper around make_chroot."""
251 cmd = ['cros_sdk', '--buildbot-log-version']
# use_sdk selects downloading a prebuilt SDK vs. bootstrapping from source.
252 cmd.append('--create' if use_sdk else '--bootstrap')
255 cmd.append('--replace')
258 cmd.append('--chrome_root=%s' % chrome_root)
260 RunBuildScript(buildroot, cmd, extra_env=extra_env)
# NOTE(review): original lines 265-266 are elided -- presumably
# `chroot_args = []` and `if chrome_root:` guarding line 267. Confirm
# against the full source.
263 def RunChrootUpgradeHooks(buildroot, chrome_root=None):
264 """Run the chroot upgrade hooks in the chroot."""
267 chroot_args.append('--chrome_root=%s' % chrome_root)
269 RunBuildScript(buildroot, ['./run_chroot_version_hooks'],
270 enter_chroot=True, chroot_args=chroot_args)
# NOTE(review): original line 287 is elided -- presumably `if debug:` guarding
# the --test-spreadsheet flag on line 288. Confirm against the full source.
273 def RefreshPackageStatus(buildroot, boards, debug):
274 """Wrapper around refresh_package_status"""
275 # First run check_gdata_token to validate or refresh auth token.
276 cmd = ['check_gdata_token']
277 RunBuildScript(buildroot, cmd, chromite_cmd=True)
279 # Prepare refresh_package_status command to update the package spreadsheet.
280 cmd = ['refresh_package_status']
282 # Skip the host board if present.
283 board = ':'.join([b for b in boards if b != 'amd64-host'])
284 cmd.append('--board=%s' % board)
286 # Upload to the test spreadsheet only when in debug mode.
288 cmd.append('--test-spreadsheet')
290 # Actually run prepared refresh_package_status command.
291 RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)
293 # Disabling the auto-filing of Tracker issues for now - crbug.com/334260.
294 #SyncPackageStatus(buildroot, debug)
# NOTE(review): original lines 302 and 307-308 are elided -- presumably
# `if debug:` guarding line 303, and the remainder of the cmdargslist
# literal. Confirm against the full source.
297 def SyncPackageStatus(buildroot, debug):
298 """Wrapper around sync_package_status."""
299 # Run sync_package_status to create Tracker issues for outdated
300 # packages. At the moment, this runs only for groups that have opted in.
301 basecmd = ['sync_package_status']
303 basecmd.extend(['--pretend', '--test-spreadsheet'])
# Each entry is a set of extra arguments for one opted-in team.
305 cmdargslist = [['--team=build'],
306 ['--team=kernel', '--default-owner=arscott'],
309 for cmdargs in cmdargslist:
310 cmd = basecmd + cmdargs
311 RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)
# NOTE(review): original line 319 is elided -- presumably `else:`, making
# lines 320-321 (deleting the stored password file) the no-password branch.
# Confirm against the full source.
314 def SetSharedUserPassword(buildroot, password):
315 """Wrapper around set_shared_user_password.sh"""
316 if password is not None:
317 cmd = ['./set_shared_user_password.sh', password]
318 RunBuildScript(buildroot, cmd, enter_chroot=True)
320 passwd_file = os.path.join(buildroot, 'chroot/etc/shared_user_passwd.txt')
321 osutils.SafeUnlink(passwd_file, sudo=True)
# NOTE(review): original lines 334-335 and 337-338 are elided -- presumably
# `if not usepkg:` guarding --nousepkg (line 336) and `if toolchain_boards:`
# guarding line 339. Confirm against the full source.
324 def UpdateChroot(buildroot, usepkg, toolchain_boards=None, extra_env=None):
325 """Wrapper around update_chroot.
328 buildroot: The buildroot of the current build.
329 usepkg: Whether to use binary packages when setting up the toolchain.
330 toolchain_boards: List of boards to always include.
331 extra_env: A dictionary of environmental variables to set during generation.
333 cmd = ['./update_chroot']
336 cmd.extend(['--nousepkg'])
339 cmd.extend(['--toolchain_boards', ','.join(toolchain_boards)])
341 RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
# NOTE(review): original lines 368-369, 371-372 and 377-378 are elided --
# presumably `if profile:` (line 370), `if not usepkg:` (line 373) and
# `if force:` (line 379). Confirm against the full source.
344 def SetupBoard(buildroot, board, usepkg, chrome_binhost_only=False,
345 extra_env=None, force=False, profile=None, chroot_upgrade=True):
346 """Wrapper around setup_board.
349 buildroot: The buildroot of the current build.
350 board: The board to set up.
351 usepkg: Whether to use binary packages when setting up the board.
352 chrome_binhost_only: If set, only use binary packages on the board for
354 extra_env: A dictionary of environmental variables to set during generation.
355 force: Whether to remove the board prior to setting it up.
356 profile: The profile to use with this board.
357 chroot_upgrade: Whether to update the chroot. If the chroot is already up to
358 date, you can specify chroot_upgrade=False.
360 cmd = ['./setup_board', '--board=%s' % board,
361 '--accept_licenses=@CHROMEOS']
363 # This isn't the greatest thing, but emerge's dependency calculation
364 # isn't the speediest thing, so let callers skip this step when they
365 # know the system is up-to-date already.
366 if not chroot_upgrade:
367 cmd.append('--skip_chroot_upgrade')
370 cmd.append('--profile=%s' % profile)
373 cmd.extend(_LOCAL_BUILD_FLAGS)
375 if chrome_binhost_only:
376 cmd.append('--chrome_binhost_only')
379 cmd.append('--force')
381 RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
class MissingBinpkg(failures_lib.InfrastructureFailure):
  """Raised when an essential binary package is missing (see VerifyBinpkg)."""
def VerifyBinpkg(buildroot, board, pkg, extra_env=None):
  """Verify that an appropriate binary package exists for |pkg|.

  Runs a pretend emerge of virtual/target-os and raises if |pkg| would be
  built from source (an 'ebuild' line) instead of installed from a prebuilt
  (a 'binary' line).

  Args:
    buildroot: The buildroot of the current build.
    board: The board to set up.
    pkg: The package to look for.
    extra_env: A dictionary of environmental variables to set.

  Raises:
    MissingBinpkg: If no prebuilt exists for |pkg|.
  """
  pretend_cmd = ['emerge-%s' % board, '-pegNv', '--color=n',
                 'virtual/target-os']
  result = RunBuildScript(buildroot, pretend_cmd, capture_output=True,
                          enter_chroot=True, extra_env=extra_env)
  match = re.search(r'^\[(ebuild|binary).*%s' % re.escape(pkg),
                    result.output, re.MULTILINE)
  if match and match.group(1) == 'ebuild':
    cros_build_lib.Info('(output):\n%s', result.output)
    raise MissingBinpkg('Cannot find prebuilts for %s on %s' % (pkg, board))
# NOTE(review): original lines 436-437, 442-443, 445-447, 449-450 and 452 are
# elided -- presumably `if not usepkg:` (line 438), `if noworkon:` (line 444),
# the chroot_args initialization and `if chrome_root:` (line 448), and the
# continuation closing the RunBuildScript call on line 451 (likely
# enter_chroot=True). Confirm against the full source.
408 def Build(buildroot, board, build_autotest, usepkg, chrome_binhost_only,
409 packages=(), skip_chroot_upgrade=True, noworkon=False,
410 extra_env=None, chrome_root=None):
411 """Wrapper around build_packages.
414 buildroot: The buildroot of the current build.
415 board: The board to set up.
416 build_autotest: Whether to build autotest-related packages.
417 usepkg: Whether to use binary packages.
418 chrome_binhost_only: If set, only use binary packages on the board for
420 packages: Tuple of specific packages we want to build. If empty,
421 build_packages will calculate a list of packages automatically.
422 skip_chroot_upgrade: Whether to skip the chroot update. If the chroot is
423 not yet up to date, you should specify skip_chroot_upgrade=False.
424 noworkon: If set, don't force-build workon packages.
425 extra_env: A dictionary of environmental variables to set during generation.
426 chrome_root: The directory where chrome is stored.
428 cmd = ['./build_packages', '--board=%s' % board,
429 '--accept_licenses=@CHROMEOS']
431 if not build_autotest:
432 cmd.append('--nowithautotest')
434 if skip_chroot_upgrade:
435 cmd.append('--skip_chroot_upgrade')
438 cmd.extend(_LOCAL_BUILD_FLAGS)
440 if chrome_binhost_only:
441 cmd.append('--chrome_binhost_only')
444 cmd.append('--noworkon')
448 chroot_args.append('--chrome_root=%s' % chrome_root)
451 RunBuildScript(buildroot, cmd, extra_env=extra_env, chroot_args=chroot_args,
# NOTE(review): the namedtuple field list (original lines 456-460) is elided
# from this listing. GetFirmwareVersions below constructs it with two values
# (main firmware version, EC firmware version).
455 FirmwareVersions = collections.namedtuple(
# NOTE(review): original line 482 is elided -- presumably the continuation
# closing the RunCommand call on lines 480-481. Confirm against the full
# source.
461 def GetFirmwareVersions(buildroot, board):
462 """Extract version information from the firmware updater, if one exists.
465 buildroot: The buildroot of the current build.
466 board: The board the firmware is for.
469 (main fw version, ec fw version)
470 Each element will either be set to the string output by the firmware
471 updater shellball, or None if there is no firmware updater.
# Locate the updater shellball inside the board sysroot within the chroot.
473 updater = os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR,
474 cros_build_lib.GetSysroot(board).lstrip(os.path.sep),
475 'usr', 'sbin', 'chromeos-firmwareupdate')
476 if not os.path.isfile(updater):
477 return FirmwareVersions(None, None)
478 updater = git.ReinterpretPathForChroot(updater)
480 result = cros_build_lib.RunCommand([updater, '-V'], enter_chroot=True,
481 capture_output=True, log_output=True,
# Scrape the version strings out of the updater's -V output.
483 main = re.search(r'BIOS version:\s*(?P<version>.*)', result.output)
484 ec = re.search(r'EC version:\s*(?P<version>.*)', result.output)
485 return (main.group('version') if main else None,
486 ec.group('version') if ec else None)
# NOTE(review): original lines 491 (presumably the docstring) and 502-503
# (presumably `if disk_layout:` guarding line 504) are elided. Confirm
# against the full source. Wrapper around the build_image script.
489 def BuildImage(buildroot, board, images_to_build, version=None,
490 rootfs_verification=True, extra_env=None, disk_layout=None):
492 # Default to base if images_to_build is passed empty.
493 if not images_to_build:
494 images_to_build = ['base']
# An empty version string is still passed; build_image treats it as unset.
496 version_str = '--version=%s' % (version or '')
498 cmd = ['./build_image', '--board=%s' % board, '--replace', version_str]
500 if not rootfs_verification:
501 cmd += ['--noenable_rootfs_verification']
504 cmd += ['--disk_layout=%s' % disk_layout]
506 cmd += images_to_build
508 RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
def GenerateAuZip(buildroot, image_dir, extra_env=None):
  """Run the script which generates au-generator.zip.

  Args:
    buildroot: The buildroot of the current build.
    image_dir: The directory in which to store au-generator.zip.
    extra_env: A dictionary of environmental variables to set during generation.

  Raises:
    failures_lib.BuildScriptFailure if the called script fails.
  """
  # The script runs inside the chroot, so translate the output path first.
  chroot_image_dir = git.ReinterpretPathForChroot(image_dir)
  RunBuildScript(buildroot,
                 ['./build_library/generate_au_zip.py', '-o',
                  chroot_image_dir],
                 extra_env=extra_env, enter_chroot=True)
# NOTE(review): original lines 540-541 are elided -- presumably the
# continuation closing the RunBuildScript call on line 539 (likely
# extra_env=extra_env). Confirm against the full source.
527 def TestAuZip(buildroot, image_dir, extra_env=None):
528 """Run the script which validates an au-generator.zip.
531 buildroot: The buildroot of the current build.
532 image_dir: The directory in which to find au-generator.zip.
533 extra_env: A dictionary of environmental variables to set during generation.
536 failures_lib.BuildScriptFailure if the test script fails.
538 cmd = ['./build_library/test_au_zip.py', '-o', image_dir]
539 RunBuildScript(buildroot, cmd, cwd=constants.CROSUTILS_DIR,
def BuildVMImageForTesting(buildroot, board, extra_env=None):
  """Convert the test image for |board| into a VM image via image_to_vm.sh."""
  vm_cmd = ['./image_to_vm.sh', '--board=%s' % board, '--test_image']
  RunBuildScript(buildroot, vm_cmd, extra_env=extra_env, enter_chroot=True)
# NOTE(review): original lines 564-567 are elided -- the start of the cmd
# list (presumably ['test_image', '--board', board, ...]) is not visible;
# only the trailing arguments appear below. Also elided: lines 554-555 (rest
# of the docstring paragraph) and 570/572 (list close and call continuation,
# likely sudo=True). Confirm against the full source.
548 def RunTestImage(buildroot, board, image_dir, results_dir):
549 """Executes test_image on the produced image in |image_dir|.
551 The "test_image" script will be run as root in chroot. Running the script as
552 root will allow the tests to read normally-forbidden files such as those
553 owned by root. Running tests inside the chroot allows us to control
557 buildroot: The buildroot of the current build.
558 board: The board the image was built for.
559 image_dir: The directory in which to find the image.
560 results_dir: The directory to store result files.
563 failures_lib.BuildScriptFailure if the test script fails.
568 '--test_results_root', cros_build_lib.ToChrootPath(results_dir),
569 cros_build_lib.ToChrootPath(image_dir),
571 RunBuildScript(buildroot, cmd, enter_chroot=True, chromite_cmd=True,
def RunSignerTests(buildroot, board):
  """Run the image security/signing tests for |board| inside the chroot."""
  security_cmd = ['./security_test_image', '--board=%s' % board]
  RunBuildScript(buildroot, security_cmd, enter_chroot=True)
# NOTE(review): original lines 585 and 588-589 are elided -- presumably
# `if not full:` guarding the package_file restriction (lines 586-587) and
# `if blacklist:` guarding line 590. Confirm against the full source.
580 def RunUnitTests(buildroot, board, full, blacklist=None, extra_env=None):
581 cmd = ['cros_run_unit_tests', '--board=%s' % board]
583 # If we aren't running ALL tests, then restrict to just the packages
584 # uprev noticed were changed.
586 package_file = _PACKAGE_FILE % {'buildroot': buildroot}
587 cmd += ['--package_file=%s' % git.ReinterpretPathForChroot(package_file)]
590 cmd += ['--blacklist_packages=%s' % ' '.join(blacklist)]
592 RunBuildScript(buildroot, cmd, enter_chroot=True, extra_env=extra_env or {})
# NOTE(review): this listing elides original lines 604-605, 607-608, 611-612,
# 615, 618, 625, 628 and 634-635 -- the opening of the cmd list, some of its
# entries, the `else:` before --quick (line 619), and the write into the
# failed_test_command file (after line 633). Confirm against the full source.
595 def RunTestSuite(buildroot, board, image_dir, results_dir, test_type,
596 whitelist_chrome_crashes, archive_dir):
597 """Runs the test harness suite."""
# Results land under <buildroot>/chroot/<results_dir>; wipe any stale copy.
598 results_dir_in_chroot = os.path.join(buildroot, 'chroot',
599 results_dir.lstrip('/'))
600 osutils.RmDir(results_dir_in_chroot, ignore_missing=True)
602 cwd = os.path.join(buildroot, 'src', 'scripts')
603 image_path = os.path.join(image_dir, 'chromiumos_test_image.bin')
606 '--board=%s' % board,
609 '--target_image=%s' % image_path,
610 '--test_results_root=%s' % results_dir_in_chroot
613 if test_type not in constants.VALID_VM_TEST_TYPES:
614 raise AssertionError('Unrecognized test type %r' % test_type)
616 if test_type == constants.FULL_AU_TEST_TYPE:
617 cmd.append('--archive_dir=%s' % archive_dir)
619 cmd.append('--quick')
620 if test_type == constants.SMOKE_SUITE_TEST_TYPE:
621 cmd.append('--only_verify')
622 cmd.append('--suite=smoke')
623 elif test_type == constants.TELEMETRY_SUITE_TEST_TYPE:
624 cmd.append('--suite=telemetry_unit')
626 if whitelist_chrome_crashes:
627 cmd.append('--whitelist_chrome_crashes')
629 result = cros_build_lib.RunCommand(cmd, cwd=cwd, error_code_ok=True)
630 if result.returncode:
# Record the failing command in the results dir for later triage, then fail.
631 if os.path.exists(results_dir_in_chroot):
632 error = '%s exited with code %d' % (' '.join(cmd), result.returncode)
633 with open(results_dir_in_chroot + '/failed_test_command', 'w') as failed:
636 raise TestFailure('** VMTests failed with code %d **' % result.returncode)
# NOTE(review): original line 645 is elided -- presumably the continuation of
# the cmd list on line 644 (likely image_path]). Confirm against the full
# source.
639 def RunDevModeTest(buildroot, board, image_dir):
640 """Runs the dev mode testing script to verify dev-mode scripts work."""
641 crostestutils = os.path.join(buildroot, 'src', 'platform', 'crostestutils')
642 image_path = os.path.join(image_dir, 'chromiumos_test_image.bin')
643 test_script = 'devmode-test/devinstall_test.py'
644 cmd = [os.path.join(crostestutils, test_script), '--verbose', board,
646 cros_build_lib.RunCommand(cmd)
# NOTE(review): original line 653 is elided -- presumably the statement that
# actually runs the constructed test (e.g. test.Run()). As shown, |test| is
# created but never used. Confirm against the full source.
649 def RunCrosVMTest(board, image_dir):
650 """Runs cros_vm_test script to verify cros flash/deploy works."""
651 image_path = os.path.join(image_dir, 'chromiumos_test_image.bin')
652 test = cros_vm_test.CrosCommandTest(board, image_path)
# NOTE(review): this listing elides original lines 669, 673-675, 687, 693,
# 695, 699 and 709-712 -- presumably the initializations of |reports|,
# |failed_tests| and |processed_tests|, the `for line in f:` loop header,
# the if/else around |match|, and the final return of |failed_tests|.
# Confirm against the full source.
656 def ListFailedTests(results_path):
657 """Returns a list of failed tests.
659 Parse the test report logs from autotest to find failed tests.
662 results_path: Path to the directory of test results.
665 A lists of (test_name, relative/path/to/failed/tests)
667 # TODO: we don't have to parse the log to find failed tests once
668 # crbug.com/350520 is fixed.
# Collect every test_report.log under |results_path|.
670 for path, _, filenames in os.walk(results_path):
671 reports.extend([os.path.join(path, x) for x in filenames
672 if x == _TEST_REPORT_FILENAME])
676 for report in reports:
677 cros_build_lib.Info('Parsing test report %s', report)
678 # Format used in the report:
679 # /path/to/base/dir/test_harness/all/SimpleTestUpdateAndVerify/ \
680 # 2_autotest_tests/results-01-security_OpenSSLBlacklist [ FAILED ]
681 # /path/to/base/dir/test_harness/all/SimpleTestUpdateAndVerify/ \
682 # 2_autotest_tests/results-01-security_OpenSSLBlacklist/ \
683 # security_OpenBlacklist [ FAILED ]
684 with open(report) as f:
685 failed_re = re.compile(r'([\./\w-]*)\s*\[\s*(\S+?)\s*\]')
686 test_name_re = re.compile(r'results-[\d]+?-([\.\w_]*)')
688 r = failed_re.search(line)
689 if r and r.group(2) == _TEST_FAILED:
690 # Process only failed tests.
691 file_path = r.group(1)
692 match = test_name_re.search(file_path)
694 test_name = match.group(1)
696 # If no match is found (due to format change or other
697 # reasons), simply use the last component of file_path.
698 test_name = os.path.basename(file_path)
700 # A test may have subtests. We don't want to list all subtests.
701 if test_name not in processed_tests:
702 base_dirname = os.path.basename(results_path)
703 # Get the relative path from the test_results directory. Note
704 # that file_path is a chroot path, while results_path is a
705 # non-chroot path, so we cannot use os.path.relpath directly.
706 rel_path = file_path.split(base_dirname)[1].lstrip(os.path.sep)
707 failed_tests.append((test_name, rel_path))
708 processed_tests.append(test_name)
def GetTestResultsDir(buildroot, test_results_dir):
  """Returns the test results directory located in chroot.

  Args:
    buildroot: Root directory where build occurs.
    test_results_dir: Path from buildroot/chroot to find test results.
      This must be a subdir of /tmp.
  """
  relative_dir = test_results_dir.lstrip('/')
  return os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR, relative_dir)
# NOTE(review): original lines 733, 736 and 745 are elided -- presumably the
# continuation closing the SudoRunCommand call (line 732) and blank lines.
# Confirm against the full source.
725 def ArchiveTestResults(results_path, archive_dir):
726 """Archives the test results to |archive_dir|.
729 results_path: Path to test results.
730 archive_dir: Local directory to archive to.
# Results are written as root inside the chroot; open them up first.
732 cros_build_lib.SudoRunCommand(['chmod', '-R', 'a+rw', results_path],
734 if os.path.exists(archive_dir):
735 osutils.RmDir(archive_dir)
737 def _ShouldIgnore(dirname, file_list):
738 # Note: We exclude VM disk and memory images. Instead, they are
739 # archived via ArchiveVMFiles. Also skip any symlinks. gsutil
740 # hangs on broken symlinks.
741 return [x for x in file_list if
742 x.startswith(constants.VM_DISK_PREFIX) or
743 x.startswith(constants.VM_MEM_PREFIX) or
744 os.path.islink(os.path.join(dirname, x))]
746 shutil.copytree(results_path, archive_dir, symlinks=False,
747 ignore=_ShouldIgnore)
# NOTE(review): original line 764 is elided -- presumably the continuation
# closing the CreateTarball call on lines 762-763 (likely chroot=chroot,
# which would explain the otherwise-unused |chroot| variable on line 761).
# Confirm against the full source.
750 def BuildAndArchiveTestResultsTarball(src_dir, buildroot):
751 """Create a compressed tarball of test results.
754 src_dir: The directory containing the test results.
755 buildroot: Build root directory.
758 The name of the tarball.
# Tarball is written next to |src_dir| as <src_dir>.tgz.
760 target = '%s.tgz' % src_dir.rstrip(os.path.sep)
761 chroot = os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR)
762 cros_build_lib.CreateTarball(
763 target, src_dir, compression=cros_build_lib.COMP_GZIP,
765 return os.path.basename(target)
# NOTE(review): this listing elides original lines 775-776, 787, 793-794,
# 804, 806 and 808-810 -- presumably the initializations of |images| and
# |tar_files|, the remaining CreateTarball keyword arguments (inputs and
# cwd=image_parent_dir, given the comment on lines 801-802), and the final
# return of |tar_files|. Confirm against the full source.
768 def ArchiveVMFiles(buildroot, test_results_dir, archive_path):
769 """Archives the VM memory and disk images into tarballs.
771 There may be multiple tests (e.g. SimpleTestUpdate and
772 SimpleTestUpdateAndVerify), and multiple files for each test (one
773 for the VM disk, and one for the VM memory). We create a separate
774 tar file for each of these files, so that each can be downloaded
778 buildroot: Build root directory.
779 test_results_dir: Path from buildroot/chroot to find test results.
780 This must a subdir of /tmp.
781 archive_path: Directory the tarballs should be written to.
784 The paths to the tarballs.
786 images_dir = os.path.join(buildroot, 'chroot', test_results_dir.lstrip('/'))
# Collect every VM disk/memory image under the results tree.
788 for path, _, filenames in os.walk(images_dir):
789 images.extend([os.path.join(path, filename) for filename in
790 fnmatch.filter(filenames, constants.VM_DISK_PREFIX + '*')])
791 images.extend([os.path.join(path, filename) for filename in
792 fnmatch.filter(filenames, constants.VM_MEM_PREFIX + '*')])
795 for image_path in images:
796 image_rel_path = os.path.relpath(image_path, images_dir)
797 image_parent_dir = os.path.dirname(image_path)
798 image_file = os.path.basename(image_path)
# Flatten the relative path into the tarball name so names stay unique.
799 tarball_path = os.path.join(archive_path,
800 "%s.tar" % image_rel_path.replace('/', '_'))
801 # Note that tar will chdir to |image_parent_dir|, so that |image_file|
802 # is at the top-level of the tar file.
803 cros_build_lib.CreateTarball(tarball_path,
805 compression=cros_build_lib.COMP_BZIP2,
807 tar_files.append(tarball_path)
# NOTE(review): this listing elides original lines 839-840, 843, 845,
# 847-848, 850, 853, 856, 859, 862, 865, 868-869, 872, 875, 879, 892 and 894
# -- presumably the RPC method name and |build| argument in the cmd list,
# `if pool is not None:` / `if num is not None:` guards, `if debug:` before
# the "would run" logging (line 870), `else:` before the Timeout branch
# (line 876), and `timeout_codes = (4,)` (referenced on line 901). Confirm
# against the full source. Also note the misspelled runtime string
# 'availble' on line 904 -- fix upstream, not in this review.
811 @failures_lib.SetFailureType(SuiteTimedOut, timeout_util.TimeoutError)
812 def RunHWTestSuite(build, suite, board, pool=None, num=None, file_bugs=None,
813 wait_for_results=None, priority=None, timeout_mins=None,
814 retry=None, minimum_duts=0, debug=True):
815 """Run the test suite in the Autotest lab.
818 build: The build is described as the bot_id and the build version.
819 e.g. x86-mario-release/R18-1655.0.0-a1-b1584.
820 suite: Name of the Autotest suite.
821 board: The board the test suite should be scheduled against.
822 pool: The pool of machines we should use to run the hw tests on.
823 num: Maximum number of devices to use when scheduling tests in the
825 file_bugs: File bugs on test failures for this suite run.
826 wait_for_results: If True, wait for autotest results before returning.
827 priority: Priority of this suite run.
828 timeout_mins: Timeout in minutes for the suite job and its sub-jobs.
829 retry: If True, will enable job-level retry. Only works when
830 wait_for_results is True.
831 minimum_duts: The minimum number of DUTs should be available in lab for the
832 suite job to be created. If it's set to 0, the check will be
834 debug: Whether we are in debug mode.
836 # TODO(scottz): RPC client option names are misnomers crosbug.com/26445.
837 cmd = [_AUTOTEST_RPC_CLIENT,
838 _AUTOTEST_RPC_HOSTNAME,
841 '--suite_name', suite,
844 # Add optional arguments to command, if present.
846 cmd += ['--pool', pool]
849 cmd += ['--num', str(num)]
851 if file_bugs is not None:
852 cmd += ['--file_bugs', str(file_bugs)]
854 if wait_for_results is not None:
855 cmd += ['--no_wait', str(not wait_for_results)]
857 if priority is not None:
858 cmd += ['--priority', priority]
860 if timeout_mins is not None:
861 cmd += ['--timeout_mins', str(timeout_mins)]
863 if retry is not None:
864 cmd += ['--retry', str(retry)]
866 if minimum_duts != 0:
867 cmd += ['--minimum_duts', str(minimum_duts)]
870 cros_build_lib.Info('RunHWTestSuite would run: %s',
871 cros_build_lib.CmdToStr(cmd))
# With no overall timeout, run directly; otherwise wrap in a Timeout that
# allows a grace period beyond the suite's own timeout.
873 if timeout_mins is None:
874 result = cros_build_lib.RunCommand(cmd, error_code_ok=True)
876 with timeout_util.Timeout(
877 timeout_mins * 60 + constants.HWTEST_TIMEOUT_EXTENSION):
878 result = cros_build_lib.RunCommand(cmd, error_code_ok=True)
880 # run_suite error codes:
881 # 0 - OK: Tests ran and passed.
882 # 1 - ERROR: Tests ran and failed (or timed out).
883 # 2 - WARNING: Tests ran and passed with warning(s). Note that 2
884 # may also be CLIENT_HTTP_CODE error returned by
885 # autotest_rpc_client.py. We ignore that case for now.
886 # 3 - INFRA_FAILURE: Tests did not complete due to lab issues.
887 # 4 - SUITE_TIMEOUT: Suite timed out. This could be caused by
888 # infrastructure failures or by test failures.
889 # 11, 12, 13 for cases when rpc is down, see autotest_rpc_errors.py.
890 lab_warning_codes = (2,)
891 infra_error_codes = (3, 11, 12, 13)
893 board_not_available_codes = (5,)
895 if result.returncode in lab_warning_codes:
896 raise TestWarning('** Suite passed with a warning code **')
897 elif result.returncode in infra_error_codes:
898 raise failures_lib.TestLabFailure(
899 '** HWTest did not complete due to infrastructure issues '
900 '(code %d) **' % result.returncode)
901 elif result.returncode in timeout_codes:
902 raise SuiteTimedOut('** Suite timed out before completion **')
903 elif result.returncode in board_not_available_codes:
904 raise BoardNotAvailable('** Board was not availble in the lab **')
905 elif result.returncode != 0:
906 raise TestFailure('** HWTest failed (code %d) **' % result.returncode)
def _GetAbortCQHWTestsURL(version, suite):
  """Get the URL where we should save state about the specified abort command.

  Args:
    version: The version of the current build. E.g. R18-1655.0.0-rc1
    suite: The suite argument that AbortCQHWTests was called with, if any.
  """
  return '%s/hwtests-aborted/%s/suite=%s' % (
      constants.MANIFEST_VERSIONS_GS_URL, version, suite)
# NOTE(review): this listing elides original lines 922-923, 927, 931, 935,
# 939-942 and 945-946 -- presumably the remaining cmd entries (the RPC abort
# method and job name built from |substr|/|suite|) and the `if debug:` /
# `try:` structure implied by the "would run" logging and the except on
# line 948. Confirm against the full source.
920 def AbortCQHWTests(version, debug, suite=''):
921 """Abort the specified hardware tests on the commit queue.
924 version: The version of the current build. E.g. R18-1655.0.0-rc1
925 debug: Whether we are in debug mode.
926 suite: Name of the Autotest suite. If empty, abort all suites.
928 # Mark the substr/suite as aborted in Google Storage.
# Copying from '-' with empty input writes a zero-byte marker object that
# HaveCQHWTestsBeenAborted() later checks with Exists().
929 ctx = gs.GSContext(dry_run=debug)
930 ctx.Copy('-', _GetAbortCQHWTestsURL(version, suite), input='')
932 # Abort all jobs for the given version, containing the '-paladin' suffix.
933 # Example job id: link-paladin/R35-5542.0.0-rc1
934 substr = '%s/%s' % (cbuildbot_config.CONFIG_TYPE_PALADIN, version)
936 # Actually abort the build.
937 cmd = [_AUTOTEST_RPC_CLIENT,
938 _AUTOTEST_RPC_HOSTNAME,
943 cros_build_lib.Info('AbortCQHWTests would run: %s',
944 cros_build_lib.CmdToStr(cmd))
947 cros_build_lib.RunCommand(cmd)
# Aborting is best-effort: log the failure rather than propagate it.
948 except cros_build_lib.RunCommandError:
949 cros_build_lib.Warning('AbortCQHWTests failed', exc_info=True)
def HaveCQHWTestsBeenAborted(version, suite=''):
  """Check in Google Storage whether the specified abort call was sent.

  This function will return True if the following call has occurred:
    AbortCQHWTests(version, debug=False, suite=suite)

  Args:
    version: The version of the current build. E.g. R18-1655.0.0-rc1
    suite: The suite argument that AbortCQHWTests was called with, if any.

  Returns:
    True if the abort marker object exists in Google Storage.
  """
  return gs.GSContext().Exists(_GetAbortCQHWTestsURL(version, suite))
def GenerateStackTraces(buildroot, board, test_results_dir,
                        archive_dir, got_symbols):
  """Generates stack traces for minidumps and asan logs in |test_results_dir|.

  Args:
    buildroot: Root directory where build occurs.
    board: Name of the board being worked on.
    test_results_dir: Directory of the test results.
    archive_dir: Local directory for archiving.
    got_symbols: True if breakpad symbols have been generated.

  Returns:
    List of stack trace file names.
  """
  stack_trace_filenames = []
  asan_log_signaled = False

  board_path = cros_build_lib.GetSysroot(board=board)
  symbol_dir = os.path.join(board_path, 'usr', 'lib', 'debug', 'breakpad')
  for curr_dir, _subdirs, files in os.walk(test_results_dir):
    for curr_file in files:
      full_file_path = os.path.join(curr_dir, curr_file)
      processed_file_path = '%s.txt' % full_file_path

      # Distinguish whether the current file is a minidump or asan_log.
      if curr_file.endswith('.dmp'):
        # Skip crash files that were purposely generated or if
        # breakpad symbols are absent.
        if not got_symbols or curr_file.find('crasher_nobreakpad') == 0:
          continue
        # Process the minidump from within chroot.
        minidump = git.ReinterpretPathForChroot(full_file_path)
        cwd = os.path.join(buildroot, 'src', 'scripts')
        cros_build_lib.RunCommand(
            ['minidump_stackwalk', minidump, symbol_dir], cwd=cwd,
            enter_chroot=True, error_code_ok=True, redirect_stderr=True,
            debug_level=logging.DEBUG, log_stdout_to_file=processed_file_path)
      # Process asan logs.
      else:
        # Prepend '/chrome/$board' path to the stack trace in log.
        log_content = ''
        with open(full_file_path) as f:
          for line in f:
            # Stack frame line example to be matched here:
            #    #0 0x721d1831 (/opt/google/chrome/chrome+0xb837831)
            stackline_match = re.search(r'^ *#[0-9]* 0x.* \(', line)
            if stackline_match:
              frame_end = stackline_match.span()[1]
              line = line[:frame_end] + board_path + line[frame_end:]
            log_content += line
        # Symbolize and demangle it.
        raw = cros_build_lib.RunCommand(
            ['asan_symbolize.py'], input=log_content, enter_chroot=True,
            debug_level=logging.DEBUG, capture_output=True,
            extra_env={'LLVM_SYMBOLIZER_PATH' : '/usr/bin/llvm-symbolizer'})
        cros_build_lib.RunCommand(['c++filt'],
                                  input=raw.output, debug_level=logging.DEBUG,
                                  cwd=buildroot, redirect_stderr=True,
                                  log_stdout_to_file=processed_file_path)
        # Break the bot if asan_log found. This is because some asan
        # crashes may not fail any test so the bot stays green.
        # Ex: crbug.com/167497
        if not asan_log_signaled:
          asan_log_signaled = True
          cros_build_lib.Error(
              'Asan crash occurred. See asan_logs in Artifacts.')
          cros_build_lib.PrintBuildbotStepFailure()

      # Append the processed file to archive.
      filename = ArchiveFile(processed_file_path, archive_dir)
      stack_trace_filenames.append(filename)

  return stack_trace_filenames
@failures_lib.SetFailureType(failures_lib.BuilderFailure)
def ArchiveFile(file_to_archive, archive_dir):
  """Archives the specified file.

  Args:
    file_to_archive: Full path to file to archive.
    archive_dir: Local directory for archiving.

  Returns:
    The base name of the archived file.
  """
  filename = os.path.basename(file_to_archive)
  if archive_dir:
    archived_file = os.path.join(archive_dir, filename)
    shutil.copy(file_to_archive, archived_file)
    # World-readable so the archive server can serve it.
    os.chmod(archived_file, 0o644)

  # The docstring promises the base name; the original body was missing
  # its return statement.
  return filename
def MarkChromeAsStable(buildroot,
                       tracking_branch,
                       chrome_rev,
                       boards,
                       chrome_version=None):
  """Returns the portage atom for the revved chrome ebuild - see man emerge.

  Args:
    buildroot: Root directory where build occurs.
    tracking_branch: Branch that the ebuild revs should track.
    chrome_rev: The chrome revision type (e.g. tot, latest_release).
    boards: The boards to mark chrome as stable for.
    chrome_version: Optional version string to force.

  Returns:
    The portage atom for the revved chrome ebuild, or None if there was
    nothing to rev.
  """
  cwd = os.path.join(buildroot, 'src', 'scripts')
  # NOTE(review): extra_env/chroot_args setup was lost in extraction; the call
  # below passes them, so initialize to None -- confirm against upstream.
  extra_env = None
  chroot_args = None

  command = ['../../chromite/bin/cros_mark_chrome_as_stable',
             '--tracking_branch=%s' % tracking_branch]
  if boards:
    command.append('--boards=%s' % ':'.join(boards))
  if chrome_version:
    command.append('--force_version=%s' % chrome_version)

  portage_atom_string = cros_build_lib.RunCommand(
      command + [chrome_rev],
      cwd=cwd,
      redirect_stdout=True,
      enter_chroot=True,
      chroot_args=chroot_args,
      extra_env=extra_env).output.rstrip()
  chrome_atom = None
  if portage_atom_string:
    chrome_atom = portage_atom_string.splitlines()[-1].partition('=')[-1]
  if not chrome_atom:
    cros_build_lib.Info('Found nothing to rev.')
    return None

  for board in boards:
    # If we're using a version of Chrome other than the latest one, we need
    # to unmask it manually.
    if chrome_rev != constants.CHROME_REV_LATEST:
      keywords_file = CHROME_KEYWORDS_FILE % {'board': board}
      cros_build_lib.SudoRunCommand(
          ['mkdir', '-p', os.path.dirname(keywords_file)],
          enter_chroot=True, cwd=cwd)
      cros_build_lib.SudoRunCommand(
          ['tee', keywords_file], input='=%s\n' % chrome_atom,
          enter_chroot=True, cwd=cwd)

    # Sanity check: We should always be able to merge the version of
    # Chrome we just unmasked.
    result = cros_build_lib.RunCommand(
        ['emerge-%s' % board, '-p', '--quiet', '=%s' % chrome_atom],
        enter_chroot=True, error_code_ok=True, combine_stdout_stderr=True,
        capture_output=True)
    if result.returncode:
      cros_build_lib.PrintBuildbotStepWarnings()
      cros_build_lib.Warning('\n%s' % result.output)
      cros_build_lib.Warning('Cannot emerge-%s =%s\nIs Chrome pinned to an '
                             'older version?' % (board, chrome_atom))
      return None

  return chrome_atom
def CleanupChromeKeywordsFile(boards, buildroot):
  """Cleans chrome uprev artifact if it exists."""
  for b in boards:
    # Map the in-chroot keywords path to its location on the host.
    path_in_chroot = CHROME_KEYWORDS_FILE % {'board': b}
    host_path = '%s/chroot%s' % (buildroot, path_in_chroot)
    if os.path.exists(host_path):
      cros_build_lib.SudoRunCommand(['rm', '-f', host_path])
def UprevPackages(buildroot, boards, overlays, enter_chroot=True):
  """Uprevs non-browser chromium os packages that have changed.

  Args:
    buildroot: Root directory where build occurs.
    boards: The boards to uprev packages for.
    overlays: The overlays to uprev packages in.
    enter_chroot: If True, run cros_mark_as_stable inside the chroot.
  """
  drop_file = _PACKAGE_FILE % {'buildroot': buildroot}
  if enter_chroot:
    # Paths must be re-expressed relative to the chroot mount points.
    overlays = [git.ReinterpretPathForChroot(x) for x in overlays]
    drop_file = git.ReinterpretPathForChroot(drop_file)
  cmd = ['cros_mark_as_stable', '--all',
         '--boards=%s' % ':'.join(boards),
         '--overlays=%s' % ':'.join(overlays),
         '--drop_file=%s' % drop_file,
         'commit']
  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=enter_chroot)
def UprevPush(buildroot, overlays, dryrun):
  """Pushes uprev changes to the main line.

  Args:
    buildroot: Root directory where build occurs.
    overlays: The overlays to push uprevs for.
    dryrun: If True, call cros_mark_as_stable with --dryrun.
  """
  cmd = ['cros_mark_as_stable',
         '--srcroot=%s' % os.path.join(buildroot, 'src'),
         '--overlays=%s' % ':'.join(overlays)]
  if dryrun:
    cmd.append('--dryrun')
  cmd.append('push')
  RunBuildScript(buildroot, cmd, chromite_cmd=True)
def GenerateCPEExport(buildroot, board, useflags=None):
  """Generate CPE export.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine.
    useflags: A list of useflags for this build.

  Returns:
    A CommandResult object with the results of running the CPE
    export command.
  """
  cmd = ['cros_extract_deps', '--format=cpe', '--board=%s' % board,
         'virtual/target-os']
  env = {}
  if useflags:
    env['USE'] = ' '.join(useflags)
  result = RunBuildScript(buildroot, cmd, enter_chroot=True,
                          chromite_cmd=True, capture_output=True,
                          extra_env=env)
  return result
def GenerateBreakpadSymbols(buildroot, board, debug):
  """Generate breakpad symbols.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine.
    debug: Include extra debugging output.
  """
  # We don't care about firmware symbols.
  # See http://crbug.com/213670.
  exclude_dirs = ['firmware']

  # Use half the CPUs (at least one job); // keeps integer division
  # semantics identical on Python 2 and 3.
  cmd = ['cros_generate_breakpad_symbols', '--board=%s' % board,
         '--jobs=%s' % str(max([1, multiprocessing.cpu_count() // 2]))]
  cmd += ['--exclude-dir=%s' % x for x in exclude_dirs]
  if debug:
    cmd += ['--debug']
  RunBuildScript(buildroot, cmd, enter_chroot=True, chromite_cmd=True)
def GenerateDebugTarball(buildroot, board, archive_path, gdb_symbols):
  """Generates a debug tarball in the archive_dir.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine
    archive_path: Directory where tarball should be stored.
    gdb_symbols: Include *.debug files for debugging core files with gdb.

  Returns:
    The filename of the created debug tarball.
  """
  # Generate debug tarball. This needs to run as root because some of the
  # symbols are only readable by root.
  chroot = os.path.join(buildroot, 'chroot')
  board_dir = os.path.join(chroot, 'build', board, 'usr', 'lib')
  debug_tgz = os.path.join(archive_path, 'debug.tgz')
  extra_args = None
  inputs = None

  if gdb_symbols:
    # Include all of debug/, but never the autotest build output or tests.
    extra_args = ['--exclude',
                  os.path.join('debug', constants.AUTOTEST_BUILD_PATH),
                  '--exclude', 'debug/tests']
    inputs = ['debug']
  else:
    # Without gdb symbols, only the breakpad symbols are wanted.
    inputs = ['debug/breakpad']

  cros_build_lib.CreateTarball(
      debug_tgz, board_dir, sudo=True, compression=cros_build_lib.COMP_GZIP,
      chroot=chroot, inputs=inputs, extra_args=extra_args)

  # Fix permissions and ownership on debug tarball.
  cros_build_lib.SudoRunCommand(['chown', str(os.getuid()), debug_tgz])
  os.chmod(debug_tgz, 0o644)

  return os.path.basename(debug_tgz)
def GenerateHtmlIndex(index, files, url_base=None, head=None, tail=None):
  """Generate a simple index.html file given a set of filenames

  Args:
    index: The file to write the html index to.
    files: The list of files to create the index of.  If a string, then it
           may be a path to a file (with one file per line), or a directory
           (which will be listed).
    url_base: The URL to prefix to all elements (otherwise they'll be relative).
    head: All the content before the listing.  '<html><body>' if not specified.
    tail: All the content after the listing.  '</body></html>' if not specified.
  """
  def GenLink(target, name=None):
    # An empty name suppresses the entry entirely.
    if name == '':
      return ''
    return ('<li><a href="%s%s">%s</a></li>'
            % (url_base, target, name if name else target))

  # NOTE: `unicode` keeps this Python 2 only, matching the rest of the file.
  if isinstance(files, (unicode, str)):
    if os.path.isdir(files):
      files = os.listdir(files)
    else:
      files = osutils.ReadFile(files).splitlines()
  url_base = url_base + '/' if url_base else ''

  if not head:
    head = '<html><body>'
  html = head + '<ul>'

  dot = ('.',)
  dot_dot = ('..',)
  links = []
  for a in sorted(set(files)):
    # Entries may carry a display name after a '|' separator.
    a = a.split('|')
    if a[0] == '.':
      dot = a
    elif a[0] == '..':
      dot_dot = a
    else:
      links.append(GenLink(*a))
  links.insert(0, GenLink(*dot_dot))
  links.insert(0, GenLink(*dot))
  html += '\n'.join(links)

  if not tail:
    tail = '</body></html>'
  html += '</ul>' + tail

  osutils.WriteFile(index, html)
def AppendToFile(file_path, string):
  """Append the string to the given file.

  This method provides atomic appends if the string is smaller than
  PIPE_BUF (> 512 bytes). It does not guarantee atomicity once the
  string is greater than that.

  Args:
    file_path: File to be appended to.
    string: String to append to the file.
  """
  osutils.WriteFile(file_path, string, mode='a')
def UpdateUploadedList(last_uploaded, archive_path, upload_urls,
                       debug):
  """Updates the archive's UPLOADED file, and uploads it to Google Storage.

  Args:
    last_uploaded: Filename of the last uploaded file.
    archive_path: Path to archive_dir.
    upload_urls: Iterable of GS locations where the UPLOADED file should be
                 uploaded.
    debug: Whether we are in debug mode.
  """
  # Append to the uploaded list.
  filename = UPLOADED_LIST_FILENAME
  AppendToFile(os.path.join(archive_path, filename), last_uploaded + '\n')

  # Upload the updated list to Google Storage.
  # update_list=False here avoids infinite recursion back into this function.
  UploadArchivedFile(archive_path, upload_urls, filename, debug,
                     update_list=False)
@failures_lib.SetFailureType(failures_lib.GSUploadFailure)
def UploadArchivedFile(archive_path, upload_urls, filename, debug,
                       update_list=False, timeout=2 * 60 * 60, acl=None):
  """Upload the specified file from the archive dir to Google Storage.

  Args:
    archive_path: Path to archive dir.
    upload_urls: Iterable of GS locations where the file should be uploaded.
    filename: Filename of the file to upload.
    debug: Whether we are in debug mode.
    update_list: Flag to update the list of uploaded files.
    timeout: Raise an exception if the upload takes longer than this timeout.
    acl: Canned gsutil acl to use (e.g. 'public-read'), otherwise the internal
         (private) one is used.
  """
  local_path = os.path.join(archive_path, filename)
  gs_context = gs.GSContext(acl=acl, dry_run=debug)

  try:
    for upload_url in upload_urls:
      with timeout_util.Timeout(timeout):
        gs_context.CopyInto(local_path, upload_url, parallel=True,
                            recursive=True)
  except timeout_util.TimeoutError:
    # Re-raise with the filename for a more actionable error message.
    raise timeout_util.TimeoutError('Timed out uploading %s' % filename)
  else:
    # Update the list of uploaded files only after every upload succeeded.
    if update_list:
      UpdateUploadedList(filename, archive_path, upload_urls, debug)
def UploadSymbols(buildroot, board, official, cnt, failed_list):
  """Upload debug symbols for this build.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine.
    official: Flag indicating this is an official build.
    cnt: Limit on the number of symbols to upload, or None for no limit.
    failed_list: File to which failed uploads are recorded, or None.
  """
  # Log the equivalent standalone command for debugging purposes.
  log_cmd = ['upload_symbols', '--board', board]
  if failed_list is not None:
    log_cmd += ['--failed-list', str(failed_list)]
  if official:
    log_cmd.append('--official_build')
  if cnt is not None:
    log_cmd += ['--upload-limit', str(cnt)]
  cros_build_lib.Info('Running: %s' % cros_build_lib.CmdToStr(log_cmd))

  ret = upload_symbols.UploadSymbols(
      board=board, official=official, upload_limit=cnt,
      root=os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR),
      failed_list=failed_list)
  if ret:
    # TODO(davidjames): Convert this to a fatal error.
    # See http://crbug.com/212437
    cros_build_lib.PrintBuildbotStepWarnings()
def PushImages(board, archive_url, dryrun, profile, sign_types=()):
  """Push the generated image to the release bucket for signing.

  Args:
    board: Board type that was built on this machine.
    archive_url: GS url of the archive to push.
    dryrun: Whether to run pushimage in dry-run mode.
    profile: Profile to pass to pushimage, if any.
    sign_types: Iterable of image types to sign.

  Returns:
    The result of pushimage.PushImage.
  """
  # Log the equivalent command for debugging purposes.
  log_cmd = ['pushimage', '--board=%s' % board]
  if dryrun:
    log_cmd.append('-n')
  if profile:
    log_cmd.append('--profile=%s' % profile)
  if sign_types:
    log_cmd.append('--sign-types=%s' % ' '.join(sign_types))
  log_cmd.append(archive_url)
  cros_build_lib.Info('Running: %s' % cros_build_lib.CmdToStr(log_cmd))

  try:
    return pushimage.PushImage(archive_url, board, profile=profile,
                               sign_types=sign_types, dry_run=dryrun)
  except pushimage.PushError:
    # Flag the step red but let the caller see the original failure.
    cros_build_lib.PrintBuildbotStepFailure()
    raise
def BuildFactoryInstallImage(buildroot, board, extra_env):
  """Build a factory install image.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine
    extra_env: Flags to be added to the environment for the new process.

  Returns:
    The basename of the symlink created for the image.
  """
  # We use build_attempt=3 here to ensure that this image uses a different
  # output directory from our regular image and the factory test image.
  alias = _FACTORY_SHIM
  cmd = ['./build_image',
         '--board=%s' % board,
         '--replace',
         '--symlink=%s' % alias,
         '--build_attempt=3',
         'factory_install']
  RunBuildScript(buildroot, cmd, extra_env=extra_env, capture_output=True,
                 enter_chroot=True)
  return alias
def MakeNetboot(buildroot, board, image_dir):
  """Build a netboot image.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    image_dir: Directory containing factory install shim.
  """
  cmd = ['./make_netboot.sh',
         '--board=%s' % board,
         '--image_dir=%s' % git.ReinterpretPathForChroot(image_dir)]
  RunBuildScript(buildroot, cmd, capture_output=True, enter_chroot=True)
def MakeFactoryToolkit(buildroot, board, output_dir, version=None):
  """Build a factory toolkit.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    output_dir: Directory for the resulting factory toolkit.
    version: Version string to be included in ID string.
  """
  cmd = ['./make_factory_toolkit.sh',
         '--board=%s' % board,
         '--output_dir=%s' % git.ReinterpretPathForChroot(output_dir)]
  if version is not None:
    cmd.extend(['--version', version])
  RunBuildScript(buildroot, cmd, capture_output=True, enter_chroot=True)
def BuildRecoveryImage(buildroot, board, image_dir, extra_env):
  """Build a recovery image.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    image_dir: Directory containing base image.
    extra_env: Flags to be added to the environment for the new process.
  """
  image = os.path.join(image_dir, constants.BASE_IMAGE_BIN)
  cmd = ['./mod_image_for_recovery.sh',
         '--board=%s' % board,
         '--image=%s' % git.ReinterpretPathForChroot(image)]
  RunBuildScript(buildroot, cmd, extra_env=extra_env, capture_output=True,
                 enter_chroot=True)
def BuildTarball(buildroot, input_list, tarball_output, cwd=None,
                 compressed=True, **kwargs):
  """Tars and zips files and directories from input_list to tarball_output.

  Args:
    buildroot: Root directory where build occurs.
    input_list: A list of files and directories to be archived.
    tarball_output: Path of output tar archive file.
    cwd: Current working directory when tar command is executed.
    compressed: Whether or not the tarball should be compressed with pbzip2.
    **kwargs: Keyword arguments to pass to CreateTarball.

  Returns:
    Return value of cros_build_lib.CreateTarball.
  """
  compressor = cros_build_lib.COMP_NONE
  if compressed:
    compressor = cros_build_lib.COMP_BZIP2
  chroot = os.path.join(buildroot, 'chroot')
  return cros_build_lib.CreateTarball(
      tarball_output, cwd, compression=compressor, chroot=chroot,
      inputs=input_list, **kwargs)
def FindFilesWithPattern(pattern, target='./', cwd=os.curdir):
  """Search the root directory recursively for matching filenames.

  Args:
    pattern: the pattern used to match the filenames.
    target: the target directory to search.
    cwd: current working directory.

  Returns:
    A list of paths of the matched files.
  """
  # Backup the current working directory before changing it
  old_cwd = os.getcwd()
  os.chdir(cwd)

  matches = []
  for target, _, filenames in os.walk(target):
    for filename in fnmatch.filter(filenames, pattern):
      matches.append(os.path.join(target, filename))

  # Restore the working directory
  os.chdir(old_cwd)

  return matches
def BuildAUTestTarball(buildroot, board, work_dir, version, archive_url):
  """Tar up the au test artifacts into the tarball_dir.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    work_dir: Location for doing work.
    version: Basic version of the build i.e. 3289.23.0.
    archive_url: GS directory where we uploaded payloads.

  Returns:
    Path of the created au_control tarball.
  """
  au_test_tarball = os.path.join(work_dir, 'au_control.tar.bz2')

  cwd = os.path.join(buildroot, 'src', 'third_party', 'autotest', 'files')
  control_files_subdir = os.path.join('autotest', 'au_control_files')

  autotest_dir = os.path.join(work_dir, control_files_subdir)
  os.makedirs(autotest_dir)

  # Get basic version without R*.
  basic_version = re.search(r'R[0-9]+-([0-9][\w.]+)', version).group(1)

  # Pass in the python paths to the libs full release test needs.
  env_dict = dict(
      chromite_path=buildroot,
      devserver_path=os.path.join(buildroot, 'src', 'platform', 'dev'))

  python_path = '%(chromite_path)s:%(devserver_path)s' % env_dict
  cmd = ['site_utils/autoupdate/full_release_test.py',
         '--npo', '--nmo', '--dump',
         '--dump_dir', autotest_dir, '--archive_url', archive_url,
         basic_version, board, '--log=debug']

  gs_context_dir = os.path.dirname(gs.GSContext.GetDefaultGSUtilBin())
  if not gs_context_dir in os.environ['PATH']:
    # Copy so we don't mutate the caller's environment.
    run_env = os.environ.copy()
    run_env['PATH'] += ':%s' % gs_context_dir
  else:
    run_env = os.environ

  run_env.setdefault('PYTHONPATH', '')
  run_env['PYTHONPATH'] += ':%s' % python_path

  cros_build_lib.RunCommand(cmd, env=run_env, cwd=cwd)
  BuildTarball(buildroot, [control_files_subdir], au_test_tarball, cwd=work_dir)
  return au_test_tarball
def BuildFullAutotestTarball(buildroot, board, tarball_dir):
  """Tar up the full autotest directory into image_dir.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    tarball_dir: Location for storing autotest tarballs.

  Returns:
    The path of the full autotest tarball.
  """
  tarball = os.path.join(tarball_dir, 'autotest.tar.bz2')
  cwd = os.path.abspath(os.path.join(buildroot, 'chroot', 'build', board,
                                     constants.AUTOTEST_BUILD_PATH, '..'))
  result = BuildTarball(buildroot, ['autotest'], tarball, cwd=cwd,
                        error_code_ok=True)

  # Emerging the autotest package to the factory test image while this is
  # running modifies the timestamp on /build/autotest/server by
  # adding a tmp directory underneath it.
  # When tar spots this, it flags this and returns
  # status code 1. The tarball is still OK, although there might be a few
  # unneeded (and garbled) tmp files. If tar fails in a different way, it'll
  # return an error code other than 1.
  # TODO: Fix the autotest ebuild. See http://crbug.com/237537
  if result.returncode not in (0, 1):
    raise Exception('Autotest tarball creation failed with exit code %s'
                    % (result.returncode))

  return tarball
def BuildImageZip(archive_dir, image_dir):
  """Build image.zip in archive_dir from contents of image_dir.

  Args:
    archive_dir: Directory to store image.zip.
    image_dir: Directory to zip up.

  Returns:
    The basename of the zipfile.
  """
  # NOTE(review): the docstring also promised "Exclude the dev image from the
  # zipfile", but no exclusion flag survives in the visible command -- confirm
  # against upstream before relying on that behavior.
  filename = 'image.zip'
  zipfile = os.path.join(archive_dir, filename)
  cros_build_lib.RunCommand(['zip', zipfile, '-r', '.'], cwd=image_dir,
                            capture_output=True)
  return filename
def BuildStandaloneArchive(archive_dir, image_dir, artifact_info):
  """Create a compressed archive from the specified image information.

  The artifact info is derived from a JSON file in the board overlay.

  Each artifact can contain the following keys:
    input - Required. A list of paths and globs that expands to
        the list of files to archive.
    output - the name of the archive to be created. If omitted,
        it will default to the first filename, stripped of
        extensions, plus the appropriate .tar.gz or other suffix.
    archive - "tar" or "zip". If omitted, files will be uploaded
        directly, without being archived together.
    compress - a value cros_build_lib.CompressionStrToType knows about. Only
        useful for tar. If omitted, an uncompressed tar will be created.

  Args:
    archive_dir: Directory to store image zip.
    image_dir: Base path for all inputs.
    artifact_info: Extended archive configuration dictionary containing:
      - paths - required, list of files to archive.
      - output, archive & compress entries from the JSON file.

  Returns:
    The list of archive file names (or the raw paths when not archived).

  Raises:
    A ValueError if the compression or archive values are unknown.
    A KeyError is a required field is missing from artifact_info.
  """
  if 'archive' not in artifact_info:
    # Nothing to do, just return the list as-is.
    return artifact_info['paths']

  inputs = artifact_info['paths']
  archive = artifact_info['archive']
  compress = artifact_info.get('compress')
  compress_type = cros_build_lib.CompressionStrToType(compress)
  if compress_type is None:
    raise ValueError('unknown compression type: %s' % compress)

  # If the output is fixed, use that. Otherwise, construct it
  # from the name of the first archived file, stripping extensions.
  filename = artifact_info.get(
      'output', '%s.%s' % (os.path.splitext(inputs[0])[0], archive))
  if archive == 'tar':
    # Add the .compress extension if we don't have a fixed name.
    if 'output' not in artifact_info and compress:
      filename = "%s.%s" % (filename, compress)
    # Keep xz cheap; these archives can be large.
    extra_env = { 'XZ_OPT' : '-1' }
    cros_build_lib.CreateTarball(
        os.path.join(archive_dir, filename), image_dir,
        inputs=inputs, compression=compress_type, extra_env=extra_env)
  elif archive == 'zip':
    cros_build_lib.RunCommand(
        ['zip', os.path.join(archive_dir, filename), '-r'] + inputs,
        cwd=image_dir, capture_output=True)
  else:
    raise ValueError('unknown archive type: %s' % archive)

  return [filename]
def BuildFirmwareArchive(buildroot, board, archive_dir):
  """Build firmware_from_source.tar.bz2 in archive_dir from build root.

  Args:
    buildroot: Root directory where build occurs.
    board: Board name of build target.
    archive_dir: Directory to store output file.

  Returns:
    The basename of the archived file, or None if the target board does
    not have firmware from source.
  """
  firmware_root = os.path.join(buildroot, 'chroot', 'build', board, 'firmware')
  source_list = [os.path.relpath(f, firmware_root)
                 for f in glob.iglob(os.path.join(firmware_root, '*'))]
  if not source_list:
    # This board has no firmware built from source.
    return None

  archive_name = 'firmware_from_source.tar.bz2'
  archive_file = os.path.join(archive_dir, archive_name)
  BuildTarball(buildroot, source_list, archive_file, cwd=firmware_root)
  return archive_name
def BuildFactoryZip(buildroot, board, archive_dir, factory_shim_dir,
                    factory_toolkit_dir, version=None):
  """Build factory_image.zip in archive_dir.

  Args:
    buildroot: Root directory where build occurs.
    board: Board name of build target.
    archive_dir: Directory to store factory_image.zip.
    factory_shim_dir: Directory containing factory shim.
    factory_toolkit_dir: Directory containing factory toolkit.
    version: The version string to be included in the factory image.zip.

  Returns:
    The basename of the zipfile.
  """
  filename = 'factory_image.zip'

  # Creates a staging temporary folder.
  temp_dir = tempfile.mkdtemp(prefix='cbuildbot_factory')

  zipfile = os.path.join(archive_dir, filename)
  cmd = ['zip', '-r', zipfile, '.']

  # Rules for archive: { folder: pattern }
  rules = {
      factory_shim_dir:
          ['*factory_install*.bin', '*partition*', os.path.join('netboot', '*')],
      factory_toolkit_dir:
          ['*factory_image*.bin', '*partition*', 'install_factory_toolkit.run'],
  }

  for folder, patterns in rules.items():
    if not folder or not os.path.exists(folder):
      continue
    basename = os.path.basename(folder)
    target = os.path.join(temp_dir, basename)
    # Symlinks let zip see the source folders inside the staging dir.
    os.symlink(folder, target)
    for pattern in patterns:
      cmd.extend(['--include', os.path.join(basename, pattern)])

  # Everything in /usr/local/factory/bundle gets overlaid into the
  # zip file directly.
  bundle_src_dir = os.path.join(
      buildroot, 'chroot', 'build', board, 'usr', 'local', 'factory', 'bundle')
  if os.path.exists(bundle_src_dir):
    for f in os.listdir(bundle_src_dir):
      src_path = os.path.join(bundle_src_dir, f)
      os.symlink(src_path, os.path.join(temp_dir, f))
      cmd.extend(['--include',
                  f if os.path.isfile(src_path) else
                  os.path.join(f, '*')])

  # Add a version file in the zip file.
  if version is not None:
    version_file = os.path.join(temp_dir, 'BUILD_VERSION')
    osutils.WriteFile(version_file, version)
    cmd.extend(['--include', version_file])

  cros_build_lib.RunCommand(cmd, cwd=temp_dir, capture_output=True)
  osutils.RmDir(temp_dir)
  return filename
def ArchiveHWQual(buildroot, hwqual_name, archive_dir, image_dir):
  """Create a hwqual tarball in archive_dir.

  Args:
    buildroot: Root directory where build occurs.
    hwqual_name: Name for tarball.
    archive_dir: Local directory for hwqual tarball.
    image_dir: Directory containing test image.

  Returns:
    The basename of the created hwqual tarball.
  """
  scripts_dir = os.path.join(buildroot, 'src', 'scripts')
  cmd = [os.path.join(scripts_dir, 'archive_hwqual'),
         '--from', archive_dir,
         '--image_dir', image_dir,
         '--output_tag', hwqual_name]
  cros_build_lib.RunCommand(cmd, capture_output=True)
  return '%s.tar.bz2' % hwqual_name
def CreateTestRoot(build_root):
  """Returns a temporary directory for test results in chroot.

  Args:
    build_root: Root of the source checkout; must contain a 'chroot' dir
                with a writable 'tmp' subdirectory.

  Returns:
    The path inside the chroot rather than whole path.
  """
  # Create test directory within tmp in chroot.
  chroot = os.path.join(build_root, 'chroot')
  chroot_tmp = os.path.join(chroot, 'tmp')
  test_root = tempfile.mkdtemp(prefix='cbuildbot', dir=chroot_tmp)

  # Path inside chroot.
  return os.path.sep + os.path.relpath(test_root, start=chroot)
def GeneratePayloads(build_root, target_image_path, archive_dir):
  """Generates the payloads for hw testing.

  Args:
    build_root: The root of the chromium os checkout.
    target_image_path: The path to the image to generate payloads to.
    archive_dir: Where to store payloads we generated.
  """
  real_target = os.path.realpath(target_image_path)
  # The path to the target should look something like this:
  # .../link/R37-5952.0.2014_06_12_2302-a1/chromiumos_test_image.bin
  board, os_version = real_target.split('/')[-3:-1]
  prefix = 'chromeos'
  suffix = 'dev.bin'

  cwd = os.path.join(build_root, 'src', 'scripts')
  path = git.ReinterpretPathForChroot(
      os.path.join(build_root, 'src', 'platform', 'dev', 'host'))
  chroot_dir = os.path.join(build_root, 'chroot')
  chroot_tmp = os.path.join(chroot_dir, 'tmp')
  chroot_target = git.ReinterpretPathForChroot(target_image_path)

  with osutils.TempDir(base_dir=chroot_tmp,
                       prefix='generate_payloads') as temp_dir:
    # Strip the chroot mount point so the path is valid inside the chroot.
    chroot_temp_dir = temp_dir.replace(chroot_dir, '', 1)

    cmd = [
        os.path.join(path, 'cros_generate_update_payload'),
        '--image', chroot_target,
        '--output', os.path.join(chroot_temp_dir, 'update.gz'),
    ]
    cros_build_lib.RunCommand(cmd, enter_chroot=True, cwd=cwd)
    name = '_'.join([prefix, os_version, board, 'full', suffix])
    # Names for full payloads look something like this:
    # chromeos_R37-5952.0.2014_06_12_2302-a1_link_full_dev.bin
    shutil.move(os.path.join(temp_dir, 'update.gz'),
                os.path.join(archive_dir, name))

    # Reuse the same command with a source image to get a delta payload.
    cmd.extend(['--src_image', chroot_target])
    cros_build_lib.RunCommand(cmd, enter_chroot=True, cwd=cwd)
    # Names for delta payloads look something like this:
    # chromeos_R37-5952.0.2014_06_12_2302-a1_R37-
    # 5952.0.2014_06_12_2302-a1_link_delta_dev.bin
    name = '_'.join([prefix, os_version, os_version, board, 'delta', suffix])
    shutil.move(os.path.join(temp_dir, 'update.gz'),
                os.path.join(archive_dir, name))

    cmd = [
        os.path.join(path, 'cros_generate_stateful_update_payload'),
        '--image', chroot_target,
        '--output', chroot_temp_dir,
    ]
    cros_build_lib.RunCommand(cmd, enter_chroot=True, cwd=cwd)
    shutil.move(os.path.join(temp_dir, STATEFUL_FILE),
                os.path.join(archive_dir, STATEFUL_FILE))
def GetChromeLKGM(revision):
  """Returns the ChromeOS LKGM from Chrome given the git revision."""
  rev = revision or 'refs/heads/master'
  # Gitiles returns file contents base64-encoded when format=text.
  url_path = '%s/+/%s/%s?format=text' % (
      constants.CHROMIUM_SRC_PROJECT, rev, constants.PATH_TO_CHROME_LKGM)
  response = gob_util.FetchUrl(constants.EXTERNAL_GOB_HOST, url_path)
  return base64.b64decode(response.read()).strip()
def SyncChrome(build_root, chrome_root, useflags, tag=None, revision=None):
  """Sync chrome.

  Args:
    build_root: The root of the chromium os checkout.
    chrome_root: The directory where chrome is stored.
    useflags: Array of use flags.
    tag: If supplied, the Chrome tag to sync.
    revision: If supplied, the Chrome revision to sync.
  """
  # --reset tells sync_chrome to blow away local changes and to feel
  # free to delete any directories that get in the way of syncing. This
  # is needed for unattended operation.
  sync_chrome = os.path.join(build_root, 'chromite', 'bin', 'sync_chrome')
  internal = constants.USE_CHROME_INTERNAL in useflags
  cmd = [sync_chrome, '--reset']
  cmd += ['--internal'] if internal else []
  cmd += ['--tag', tag] if tag is not None else []
  cmd += ['--revision', revision] if revision is not None else []
  cmd += [chrome_root]
  retry_util.RunCommandWithRetries(constants.SYNC_RETRIES, cmd, cwd=build_root)
def PatchChrome(chrome_root, patch, subdir):
  """Apply a patch to Chrome.

  Args:
    chrome_root: The directory where chrome is stored.
    patch: Rietveld issue number to apply.
    subdir: Subdirectory to apply patch in.
  """
  cmd = ['apply_issue', '-i', patch]
  cros_build_lib.RunCommand(cmd, cwd=os.path.join(chrome_root, subdir))
class ChromeSDK(object):
  """Wrapper for the 'cros chrome-sdk' command."""

  DEFAULT_TARGETS = ('chrome', 'chrome_sandbox', 'nacl_helper',)
  DEFAULT_JOBS = 24
  DEFAULT_JOBS_GOMA = 500

  def __init__(self, cwd, board, extra_args=None, chrome_src=None, goma=False,
               debug_log=True, cache_dir=None, target_tc=None,
               toolchain_url=None):
    """Initialize.

    Args:
      cwd: Where to invoke 'cros chrome-sdk'.
      board: The board to run chrome-sdk for.
      extra_args: Extra args to pass in on the command line.
      chrome_src: Path to pass in with --chrome-src.
      goma: If True, run using goma.
      debug_log: If set, run with debug log-level.
      cache_dir: Specify non-default cache directory.
      target_tc: Override target toolchain.
      toolchain_url: Override toolchain url pattern.
    """
    self.cwd = cwd
    self.board = board
    self.extra_args = extra_args or []
    if chrome_src:
      self.extra_args += ['--chrome-src', chrome_src]
    self.goma = goma
    if not self.goma:
      self.extra_args.append('--nogoma')
    self.debug_log = debug_log
    self.cache_dir = cache_dir
    self.target_tc = target_tc
    self.toolchain_url = toolchain_url

  def Run(self, cmd, extra_args=None):
    """Run a command inside the chrome-sdk context.

    Args:
      cmd: Command (list of args) to run after the '--' separator.
      extra_args: Additional 'cros chrome-sdk' args for this call only.
    """
    cros_cmd = ['cros']
    if self.debug_log:
      cros_cmd += ['--log-level', 'debug']
    if self.cache_dir:
      cros_cmd += ['--cache-dir', self.cache_dir]
    # Build the toolchain overrides into a per-call local list.  The
    # original appended them to self.extra_args, so every Run() call
    # accumulated duplicate --target-tc/--toolchain-url flags.
    sdk_args = list(self.extra_args)
    if self.target_tc:
      sdk_args += ['--target-tc', self.target_tc]
    if self.toolchain_url:
      sdk_args += ['--toolchain-url', self.toolchain_url]
    cros_cmd += ['chrome-sdk', '--board', self.board] + sdk_args
    cros_cmd += (extra_args or []) + ['--'] + cmd
    cros_build_lib.RunCommand(cros_cmd, cwd=self.cwd)

  def Ninja(self, jobs=None, debug=False, targets=DEFAULT_TARGETS):
    """Run 'ninja' inside a chrome-sdk context.

    Args:
      jobs: The number of -j jobs to run.  Defaults to DEFAULT_JOBS, or
        DEFAULT_JOBS_GOMA when goma is enabled.
      debug: Whether to do a Debug build (defaults to Release).
      targets: The targets to compile.
    """
    if jobs is None:
      jobs = self.DEFAULT_JOBS_GOMA if self.goma else self.DEFAULT_JOBS
    flavor = 'Debug' if debug else 'Release'
    cmd = ['ninja', '-C', 'out_%s/%s' % (self.board, flavor), '-j', str(jobs)]
    self.Run(cmd + list(targets))