1 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Module containing the various individual commands a builder can run."""
7 from __future__ import print_function
14 import multiprocessing
20 from chromite.cbuildbot import cbuildbot_config
21 from chromite.cbuildbot import failures_lib
22 from chromite.cbuildbot import constants
23 from chromite.cros.tests import cros_vm_test
24 from chromite.lib import cros_build_lib
25 from chromite.lib import git
26 from chromite.lib import gob_util
27 from chromite.lib import gs
28 from chromite.lib import locking
29 from chromite.lib import osutils
30 from chromite.lib import parallel
31 from chromite.lib import retry_util
32 from chromite.lib import timeout_util
33 from chromite.scripts import pushimage
34 from chromite.scripts import upload_symbols
# Template (filled with a buildroot) for the file listing packages touched
# by the uprev step; consumed by RunUnitTests below.
_PACKAGE_FILE = '%(buildroot)s/src/scripts/cbuildbot_package.list'
# Template (filled with a board name) for the portage keywords file that
# controls which Chrome ebuilds are visible on the board.
CHROME_KEYWORDS_FILE = ('/build/%(board)s/etc/portage/package.keywords/chrome')
# Name of the environment/metadata key holding the archive URL.
_CROS_ARCHIVE_URL = 'CROS_ARCHIVE_URL'
_FACTORY_SHIM = 'factory_shim'
# Location and host of the Autotest RPC client used by the HWTest helpers.
_AUTOTEST_RPC_CLIENT = ('/b/build_internal/scripts/slave-internal/autotest_rpc/'
                        'autotest_rpc_client.py')
_AUTOTEST_RPC_HOSTNAME = 'master2'
# Flags appended to setup_board/build_packages when building locally
# (i.e. without binary prebuilts).
_LOCAL_BUILD_FLAGS = ['--nousepkg', '--reuse_pkgs_from_local_boards']
UPLOADED_LIST_FILENAME = 'UPLOADED'
STATEFUL_FILE = 'stateful.tgz'
# For sorting through VM test results.
_TEST_REPORT_FILENAME = 'test_report.log'
_TEST_PASSED = 'PASSED'
_TEST_FAILED = 'FAILED'
class TestFailure(failures_lib.StepFailure):
  """Step failure raised when a testing stage (e.g. VMTest) fails outright."""
class TestWarning(failures_lib.StepFailure):
  """Step failure raised when a testing stage (e.g. VMTest) exits with a warning code."""
class SuiteTimedOut(failures_lib.TestLabFailure):
  """Lab failure raised when a test suite times out without any test failures."""
63 # =========================== Command Helpers =================================
def RunBuildScript(buildroot, cmd, chromite_cmd=False, **kwargs):
  """Run a build script, wrapping exceptions as needed.

  This wraps RunCommand(cmd, cwd=buildroot, **kwargs), adding extra logic to
  help determine the cause of command failures.
    - If a package fails to build, a PackageBuildFailure exception is thrown,
      which lists exactly which packages failed to build.
    - If the command fails for a different reason, a BuildScriptFailure
      is thrown.

  We detect what packages failed to build by creating a temporary status file,
  and passing that status file to parallel_emerge via the
  PARALLEL_EMERGE_STATUS_FILE variable.

  Args:
    buildroot: The root of the build directory.
    cmd: The command to run.
    chromite_cmd: Whether the command should be evaluated relative to the
      chromite/bin subdir of the |buildroot|.
    kwargs: Optional args passed to RunCommand; see RunCommand for specifics.
      In addition, if 'sudo' kwarg is True, SudoRunCommand will be used.
  """
  assert not kwargs.get('shell', False), 'Cannot execute shell commands'
  kwargs.setdefault('cwd', buildroot)
  enter_chroot = kwargs.get('enter_chroot', False)
  sudo = kwargs.pop('sudo', False)

  # NOTE(review): the guards selecting between the two cmd[0] rewrites below
  # (presumably `if chromite_cmd:` with an enter_chroot branch) appear to be
  # missing from this listing — confirm against upstream before relying on
  # this control flow.
  cmd[0] = git.ReinterpretPathForChroot(
      os.path.join(buildroot, constants.CHROMITE_BIN_SUBDIR, cmd[0]))
  cmd[0] = os.path.join(buildroot, constants.CHROMITE_BIN_SUBDIR, cmd[0])

  # If we are entering the chroot, create status file for tracking what
  # packages failed to build.
  chroot_tmp = os.path.join(buildroot, 'chroot', 'tmp')
  # NOTE(review): a `status_file = None` initializer appears elided here —
  # the `if status_file is not None:` check below depends on it.
  with cros_build_lib.ContextManagerStack() as stack:
    if enter_chroot and os.path.exists(chroot_tmp):
      kwargs['extra_env'] = (kwargs.get('extra_env') or {}).copy()
      # Temp file lives under chroot/tmp so it is visible both inside and
      # outside the chroot; parallel_emerge writes failed packages into it.
      status_file = stack.Add(tempfile.NamedTemporaryFile, dir=chroot_tmp)
      kwargs['extra_env']['PARALLEL_EMERGE_STATUS_FILE'] = \
          git.ReinterpretPathForChroot(status_file.name)
    runcmd = cros_build_lib.RunCommand
    # NOTE(review): an `if sudo:` guard appears elided before this rebind.
    runcmd = cros_build_lib.SudoRunCommand
    # NOTE(review): a `try:` line appears elided before this return; the
    # `except` below has no visible matching `try` in this listing.
    return runcmd(cmd, **kwargs)
    except cros_build_lib.RunCommandError as ex:
      # Print the original exception.
      cros_build_lib.Error('\n%s', ex)

      # Check whether a specific package failed. If so, wrap the exception
      # appropriately. These failures are usually caused by a recent CL, so we
      # don't ever treat these failures as flaky.
      if status_file is not None:
        # NOTE(review): a rewind (seek) and an emptiness check on
        # |failed_packages| appear elided around these lines.
        failed_packages = status_file.read().split()
        raise failures_lib.PackageBuildFailure(ex, cmd[0], failed_packages)

      # Looks like a generic failure. Raise a BuildScriptFailure.
      raise failures_lib.BuildScriptFailure(ex, cmd[0])
def GetInput(prompt):
  """Prompt the user and return the line they type.

  Kept as a thin wrapper around raw_input so unit tests can stub out
  interactive input easily.
  """
  return raw_input(prompt)
def ValidateClobber(buildroot):
  """Do due diligence if user wants to clobber buildroot.

  Args:
    buildroot: buildroot that's potentially clobbered.

  Returns:
    True if the clobber is ok.
  """
  cwd = os.path.dirname(os.path.realpath(__file__))
  if cwd.startswith(buildroot):
    cros_build_lib.Die('You are trying to clobber this chromite checkout!')

  # NOTE(review): the guard for the Die() below (presumably a check that
  # |buildroot| is '/' or similar) appears elided from this listing.
    cros_build_lib.Die('Refusing to clobber your system!')

  if os.path.exists(buildroot):
    return cros_build_lib.BooleanPrompt(default=False)
  # NOTE(review): a trailing `return True` (clobbering a nonexistent
  # buildroot is trivially ok) appears elided — confirm against upstream.
159 # =========================== Main Commands ===================================
def BuildRootGitCleanup(buildroot):
  """Put buildroot onto manifest branch. Delete branches created on last run.

  Args:
    buildroot: buildroot to clean up.
  """
  lock_path = os.path.join(buildroot, '.clean_lock')
  # Shared event so the parallel workers can signal that at least one git
  # object directory was deleted (forcing a second cleanup pass below).
  deleted_objdirs = multiprocessing.Event()

  def RunCleanupCommands(project, cwd):
    # Per-checkout cleanup, run in a worker process under a shared read lock.
    with locking.FileLock(lock_path, verbose=False).read_lock() as lock:
      # Calculate where the git repository is stored.
      relpath = os.path.relpath(cwd, buildroot)
      projects_dir = os.path.join(buildroot, '.repo', 'projects')
      project_objects_dir = os.path.join(buildroot, '.repo', 'project-objects')
      repo_git_store = '%s.git' % os.path.join(projects_dir, relpath)
      repo_obj_store = '%s.git' % os.path.join(project_objects_dir, project)

      # NOTE(review): a `try:` line appears elided before this block; the
      # `except` below has no visible matching `try` in this listing.
      if os.path.isdir(cwd):
        git.CleanAndDetachHead(cwd)
        git.GarbageCollection(cwd)
      except cros_build_lib.RunCommandError as e:
        # NOTE(review): `result = e.result` appears elided here — |result|
        # is referenced below but never assigned in this listing.
        cros_build_lib.PrintBuildbotStepWarnings()
        logging.warn('\n%s', result.error)

        # If there's no repository corruption, just delete the index.
        corrupted = git.IsGitRepositoryCorrupted(cwd)
        # NOTE(review): a write-lock upgrade (`lock.write_lock()`) and the
        # branch on |corrupted| appear elided around the lines below.
        logging.warn('Deleting %s because %s failed', cwd, result.cmd)
        osutils.RmDir(cwd, ignore_missing=True)
          # Looks like the object dir is corrupted. Delete the whole repository.
          deleted_objdirs.set()
          for store in (repo_git_store, repo_obj_store):
            logging.warn('Deleting %s as well', store)
            osutils.RmDir(store, ignore_missing=True)

      # Delete all branches created by cbuildbot.
      if os.path.isdir(repo_git_store):
        cmd = ['branch', '-D'] + list(constants.CREATED_BRANCHES)
        # error_code_ok because the branches may simply not exist.
        git.RunGit(repo_git_store, cmd, error_code_ok=True)

  # Cleanup all of the directories.
  dirs = [[attrs['name'], os.path.join(buildroot, attrs['path'])] for attrs in
          git.ManifestCheckout.Cached(buildroot).ListCheckouts()]
  parallel.RunTasksInProcessPool(RunCleanupCommands, dirs)

  # repo shares git object directories amongst multiple project paths. If the
  # first pass deleted an object dir for a project path, then other repositories
  # (project paths) of that same project may now be broken. Do a second pass to
  # clean them up as well.
  if deleted_objdirs.is_set():
    parallel.RunTasksInProcessPool(RunCleanupCommands, dirs)
def CleanUpMountPoints(buildroot):
  """Lazily unmount any stale mount points left under |buildroot|.

  Mounts are scraped from /proc/mounts (via osutils.IterateMountPoints) since
  that is easily accessible, and unmounted in the reverse of their listed
  order rather than via a reverse sort: nested loop mounts such as
  `mount /foon/blah -o loop /a` defeat a simple reverse sorting.
  """
  root_prefix = os.path.realpath(buildroot).rstrip('/') + '/'
  stale_mounts = []
  for mtab in osutils.IterateMountPoints():
    if mtab.destination.startswith(root_prefix):
      stale_mounts.append(mtab.destination)

  # Unmount in reverse order so inner mounts go before their parents.
  for mount_pt in reversed(stale_mounts):
    osutils.UmountDir(mount_pt, lazy=True, cleanup=False)
def WipeOldOutput(buildroot):
  """Wipes out the built-image output directory.

  Removes <buildroot>/src/build/images (all boards) with sudo, ignoring the
  directory if it does not exist.

  Args:
    buildroot: Root directory where build occurs.
  """
  # Fixed: the old docstring documented a `board` argument that this
  # function does not take — the whole images tree is removed.
  image_dir = os.path.join(buildroot, 'src', 'build', 'images')
  osutils.RmDir(image_dir, ignore_missing=True, sudo=True)
def MakeChroot(buildroot, replace, use_sdk, chrome_root=None, extra_env=None):
  """Wrapper around make_chroot.

  Args:
    buildroot: The root of the build directory.
    replace: If set, pass --replace to recreate the chroot.
    use_sdk: If True, create from the prebuilt SDK (--create); otherwise
      bootstrap from source (--bootstrap).
    chrome_root: Optional directory where chrome is stored, mounted into
      the chroot.
    extra_env: Optional environment variables for the invocation.
  """
  cmd = ['cros_sdk', '--buildbot-log-version']
  cmd.append('--create' if use_sdk else '--bootstrap')

  # NOTE(review): an `if replace:` guard appears elided before this append.
  cmd.append('--replace')

  # NOTE(review): an `if chrome_root:` guard appears elided before this
  # append.
  cmd.append('--chrome_root=%s' % chrome_root)

  RunBuildScript(buildroot, cmd, extra_env=extra_env)
def RunChrootUpgradeHooks(buildroot, chrome_root=None):
  """Run the chroot upgrade hooks in the chroot."""
  # NOTE(review): a `chroot_args = []` initializer and an `if chrome_root:`
  # guard appear elided before this append — |chroot_args| is otherwise
  # undefined in this listing.
  chroot_args.append('--chrome_root=%s' % chrome_root)

  RunBuildScript(buildroot, ['./run_chroot_version_hooks'],
                 enter_chroot=True, chroot_args=chroot_args)
def RefreshPackageStatus(buildroot, boards, debug):
  """Wrapper around refresh_package_status"""
  # First run check_gdata_token to validate or refresh auth token.
  cmd = ['check_gdata_token']
  RunBuildScript(buildroot, cmd, chromite_cmd=True)

  # Prepare refresh_package_status command to update the package spreadsheet.
  cmd = ['refresh_package_status']

  # Skip the host board if present.
  board = ':'.join([b for b in boards if b != 'amd64-host'])
  cmd.append('--board=%s' % board)

  # Upload to the test spreadsheet only when in debug mode.
  # NOTE(review): an `if debug:` guard appears elided before this append.
  cmd.append('--test-spreadsheet')

  # Actually run prepared refresh_package_status command.
  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)

  # Disabling the auto-filing of Tracker issues for now - crbug.com/334260.
  #SyncPackageStatus(buildroot, debug)
def SyncPackageStatus(buildroot, debug):
  """Wrapper around sync_package_status."""
  # Run sync_package_status to create Tracker issues for outdated
  # packages. At the moment, this runs only for groups that have opted in.
  basecmd = ['sync_package_status']
  # NOTE(review): an `if debug:` guard appears elided before this extend
  # (pretend/test-spreadsheet flags should only apply in debug mode).
  basecmd.extend(['--pretend', '--test-spreadsheet'])

  # NOTE(review): additional team entries and the closing bracket of this
  # list appear elided from the listing.
  cmdargslist = [['--team=build'],
                 ['--team=kernel', '--default-owner=arscott'],

  # One invocation per opted-in team.
  for cmdargs in cmdargslist:
    cmd = basecmd + cmdargs
    RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)
def SetSharedUserPassword(buildroot, password):
  """Wrapper around set_shared_user_password.sh"""
  if password is not None:
    cmd = ['./set_shared_user_password.sh', password]
    RunBuildScript(buildroot, cmd, enter_chroot=True)
  # NOTE(review): an `else:` header appears elided here — the cleanup below
  # presumably runs only when no password is provided.
    passwd_file = os.path.join(buildroot, 'chroot/etc/shared_user_passwd.txt')
    osutils.SafeUnlink(passwd_file, sudo=True)
def UpdateChroot(buildroot, usepkg, toolchain_boards=None, extra_env=None):
  """Wrapper around update_chroot.

  Args:
    buildroot: The buildroot of the current build.
    usepkg: Whether to use binary packages when setting up the toolchain.
    toolchain_boards: List of boards to always include.
    extra_env: A dictionary of environmental variables to set during generation.
  """
  cmd = ['./update_chroot']

  # NOTE(review): an `if not usepkg:` guard appears elided before this
  # extend.
  cmd.extend(['--nousepkg'])

  # NOTE(review): an `if toolchain_boards:` guard appears elided before this
  # extend.
  cmd.extend(['--toolchain_boards', ','.join(toolchain_boards)])

  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
def SetupBoard(buildroot, board, usepkg, chrome_binhost_only=False,
               extra_env=None, force=False, profile=None, chroot_upgrade=True):
  """Wrapper around setup_board.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to set up.
    usepkg: Whether to use binary packages when setting up the board.
    chrome_binhost_only: If set, only use binary packages on the board for
      Chrome itself.  (NOTE(review): the continuation of this line is elided
      in the listing; wording assumed — confirm.)
    extra_env: A dictionary of environmental variables to set during generation.
    force: Whether to remove the board prior to setting it up.
    profile: The profile to use with this board.
    chroot_upgrade: Whether to update the chroot. If the chroot is already up to
      date, you can specify chroot_upgrade=False.
  """
  cmd = ['./setup_board', '--board=%s' % board,
         '--accept_licenses=@CHROMEOS']

  # This isn't the greatest thing, but emerge's dependency calculation
  # isn't the speediest thing, so let callers skip this step when they
  # know the system is up-to-date already.
  if not chroot_upgrade:
    cmd.append('--skip_chroot_upgrade')

  # NOTE(review): an `if profile:` guard appears elided before this append.
  cmd.append('--profile=%s' % profile)

  # NOTE(review): an `if not usepkg:` guard appears elided before this
  # extend.
  cmd.extend(_LOCAL_BUILD_FLAGS)

  if chrome_binhost_only:
    cmd.append('--chrome_binhost_only')

  # NOTE(review): an `if force:` guard appears elided before this append.
  cmd.append('--force')

  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
class MissingBinpkg(failures_lib.InfrastructureFailure):
  """Infrastructure failure raised when an essential binary package is missing."""
def VerifyBinpkg(buildroot, board, pkg, extra_env=None):
  """Verify that an appropriate binary package exists for |pkg|.

  Performs a pretend emerge of virtual/target-os and inspects whether |pkg|
  would be installed from a prebuilt ('binary') or built from source
  ('ebuild').

  Args:
    buildroot: The buildroot of the current build.
    board: The board to set up.
    pkg: The package to look for.
    extra_env: A dictionary of environmental variables to set.

  Raises:
    MissingBinpkg: If |pkg| would be built from source rather than installed
      from a binary package.
  """
  emerge_cmd = ['emerge-%s' % board, '-pegNv', '--color=n', 'virtual/target-os']
  result = RunBuildScript(buildroot, emerge_cmd, capture_output=True,
                          enter_chroot=True, extra_env=extra_env)

  # Each planned package appears as a line like "[ebuild ...]" or
  # "[binary ...]"; 'ebuild' means a source build, i.e. no prebuilt.
  match = re.search(r'^\[(ebuild|binary).*%s' % re.escape(pkg),
                    result.output, re.MULTILINE)
  if match is not None and match.group(1) == 'ebuild':
    cros_build_lib.Info('(output):\n%s', result.output)
    msg = 'Cannot find prebuilts for %s on %s' % (pkg, board)
    raise MissingBinpkg(msg)
def Build(buildroot, board, build_autotest, usepkg, chrome_binhost_only,
          packages=(), skip_chroot_upgrade=True, noworkon=False,
          extra_env=None, chrome_root=None):
  """Wrapper around build_packages.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to set up.
    build_autotest: Whether to build autotest-related packages.
    usepkg: Whether to use binary packages.
    chrome_binhost_only: If set, only use binary packages on the board for
      Chrome itself.  (NOTE(review): continuation of this line is elided in
      the listing; wording assumed — confirm.)
    packages: Tuple of specific packages we want to build. If empty,
      build_packages will calculate a list of packages automatically.
    skip_chroot_upgrade: Whether to skip the chroot update. If the chroot is
      not yet up to date, you should specify skip_chroot_upgrade=False.
    noworkon: If set, don't force-build workon packages.
    extra_env: A dictionary of environmental variables to set during generation.
    chrome_root: The directory where chrome is stored.
  """
  cmd = ['./build_packages', '--board=%s' % board,
         '--accept_licenses=@CHROMEOS']

  if not build_autotest:
    cmd.append('--nowithautotest')

  if skip_chroot_upgrade:
    cmd.append('--skip_chroot_upgrade')

  # NOTE(review): an `if not usepkg:` guard appears elided before this
  # extend.
  cmd.extend(_LOCAL_BUILD_FLAGS)

  if chrome_binhost_only:
    cmd.append('--chrome_binhost_only')

  # NOTE(review): an `if noworkon:` guard appears elided before this append.
  cmd.append('--noworkon')

  # NOTE(review): a `chroot_args = []` initializer and an `if chrome_root:`
  # guard appear elided — |chroot_args| is otherwise undefined here.
  chroot_args.append('--chrome_root=%s' % chrome_root)

  # NOTE(review): the trailing arguments of this call (and presumably the
  # extension of |cmd| with |packages|) appear elided from the listing.
  RunBuildScript(buildroot, cmd, extra_env=extra_env, chroot_args=chroot_args,
# Version strings reported by the firmware updater shellball; either field
# may be None when no updater or version string is present (see
# GetFirmwareVersions).  Reconstructed: the original statement was truncated
# in this listing.
FirmwareVersions = collections.namedtuple('FirmwareVersions', ['main', 'ec'])
def GetFirmwareVersions(buildroot, board):
  """Extract version information from the firmware updater, if one exists.

  Args:
    buildroot: The buildroot of the current build.
    board: The board the firmware is for.

  Returns:
    (main fw version, ec fw version)
    Each element will either be set to the string output by the firmware
    updater shellball, or None if there is no firmware updater.
  """
  updater = os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR,
                         cros_build_lib.GetSysroot(board).lstrip(os.path.sep),
                         'usr', 'sbin', 'chromeos-firmwareupdate')
  if not os.path.isfile(updater):
    return FirmwareVersions(None, None)
  updater = git.ReinterpretPathForChroot(updater)

  # NOTE(review): the closing argument(s) of this RunCommand call appear
  # elided from the listing.
  result = cros_build_lib.RunCommand([updater, '-V'], enter_chroot=True,
                                     capture_output=True, log_output=True,
  main = re.search(r'BIOS version:\s*(?P<version>.*)', result.output)
  ec = re.search(r'EC version:\s*(?P<version>.*)', result.output)
  # NOTE(review): this returns a plain tuple while the early-out above
  # returns a FirmwareVersions namedtuple; they compare equal but differ in
  # type.
  return (main.group('version') if main else None,
          ec.group('version') if ec else None)
def BuildImage(buildroot, board, images_to_build, version=None,
               rootfs_verification=True, extra_env=None, disk_layout=None):
  """Run ./build_image to build |images_to_build| for |board|.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to build images for.
    images_to_build: List of image types to build; defaults to ['base'] when
      empty.
    version: Optional version string passed through to build_image.
    rootfs_verification: If False, pass --noenable_rootfs_verification.
    extra_env: Optional environment variables for the invocation.
    disk_layout: Optional disk layout to pass to build_image.
  """
  # Default to base if images_to_build is passed empty.
  if not images_to_build:
    images_to_build = ['base']

  version_str = '--version=%s' % (version or '')

  cmd = ['./build_image', '--board=%s' % board, '--replace', version_str]

  if not rootfs_verification:
    cmd += ['--noenable_rootfs_verification']

  # NOTE(review): an `if disk_layout:` guard appears elided before this
  # append.
  cmd += ['--disk_layout=%s' % disk_layout]

  cmd += images_to_build

  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
def GenerateAuZip(buildroot, image_dir, extra_env=None):
  """Generate au-generator.zip into |image_dir|.

  Args:
    buildroot: The buildroot of the current build.
    image_dir: The directory in which to store au-generator.zip.
    extra_env: A dictionary of environmental variables to set during generation.

  Raises:
    failures_lib.BuildScriptFailure if the called script fails.
  """
  # The script runs inside the chroot, so translate the output dir first.
  zip_cmd = ['./build_library/generate_au_zip.py', '-o',
             git.ReinterpretPathForChroot(image_dir)]
  RunBuildScript(buildroot, zip_cmd, extra_env=extra_env, enter_chroot=True)
def TestAuZip(buildroot, image_dir, extra_env=None):
  """Run the script which validates an au-generator.zip.

  Args:
    buildroot: The buildroot of the current build.
    image_dir: The directory in which to find au-generator.zip.
    extra_env: A dictionary of environmental variables to set during generation.

  Raises:
    failures_lib.BuildScriptFailure if the test script fails.
  """
  cmd = ['./build_library/test_au_zip.py', '-o', image_dir]
  # NOTE(review): the closing argument(s) of this call (e.g. extra_env)
  # appear elided from the listing.
  RunBuildScript(buildroot, cmd, cwd=constants.CROSUTILS_DIR,
def BuildVMImageForTesting(buildroot, board, extra_env=None):
  """Convert the board's test image into a VM image via image_to_vm.sh."""
  vm_cmd = ['./image_to_vm.sh', '--board=%s' % board, '--test_image']
  RunBuildScript(buildroot, vm_cmd, extra_env=extra_env, enter_chroot=True)
def RunTestImage(buildroot, board, image_dir, results_dir):
  """Executes test_image on the produced image in |image_dir|.

  The "test_image" script will be run as root in chroot. Running the script as
  root will allow the tests to read normally-forbidden files such as those
  owned by root. Running tests inside the chroot allows us to control
  the environment.  (NOTE(review): the tail of this sentence is elided in
  the listing; wording assumed.)

  Args:
    buildroot: The buildroot of the current build.
    board: The board the image was built for.
    image_dir: The directory in which to find the image.
    results_dir: The directory to store result files.

  Raises:
    failures_lib.BuildScriptFailure if the test script fails.
  """
  # NOTE(review): the opening of this command list (script name, --board
  # flag, `cmd = [` line) appears elided from the listing.
      '--test_results_root', cros_build_lib.ToChrootPath(results_dir),
      cros_build_lib.ToChrootPath(image_dir),
  # NOTE(review): the list close and the trailing call arguments (e.g.
  # sudo=True) appear elided as well.
  RunBuildScript(buildroot, cmd, enter_chroot=True, chromite_cmd=True,
def RunSignerTests(buildroot, board):
  """Run security_test_image against the built image for |board|."""
  security_cmd = ['./security_test_image', '--board=%s' % board]
  RunBuildScript(buildroot, security_cmd, enter_chroot=True)
def RunUnitTests(buildroot, board, full, blacklist=None, extra_env=None):
  """Run cros_run_unit_tests for |board| inside the chroot.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to run unit tests for.
    full: If not set, testing is restricted to packages flagged by uprev
      (see the comment below).
    blacklist: Optional list of packages to exclude via
      --blacklist_packages.
    extra_env: Optional environment variables for the invocation.
  """
  cmd = ['cros_run_unit_tests', '--board=%s' % board]

  # If we aren't running ALL tests, then restrict to just the packages
  # uprev noticed were changed.
  # NOTE(review): an `if not full:` guard appears elided before these two
  # lines.
  package_file = _PACKAGE_FILE % {'buildroot': buildroot}
  cmd += ['--package_file=%s' % git.ReinterpretPathForChroot(package_file)]

  # NOTE(review): an `if blacklist:` guard appears elided before this line.
  cmd += ['--blacklist_packages=%s' % ' '.join(blacklist)]

  RunBuildScript(buildroot, cmd, enter_chroot=True, extra_env=extra_env or {})
def RunTestSuite(buildroot, board, image_dir, results_dir, test_type,
                 whitelist_chrome_crashes, archive_dir):
  """Runs the test harness suite."""
  # Results land under the chroot; wipe any stale results first.
  results_dir_in_chroot = os.path.join(buildroot, 'chroot',
                                       results_dir.lstrip('/'))
  osutils.RmDir(results_dir_in_chroot, ignore_missing=True)

  cwd = os.path.join(buildroot, 'src', 'scripts')
  image_path = os.path.join(image_dir, 'chromiumos_test_image.bin')

  # NOTE(review): the opening of this command list (`cmd = [` plus the test
  # harness script name) appears elided from the listing.
      '--board=%s' % board,
      '--target_image=%s' % image_path,
      '--test_results_root=%s' % results_dir_in_chroot
  # NOTE(review): the list close bracket appears elided as well.

  if test_type not in constants.VALID_VM_TEST_TYPES:
    raise AssertionError('Unrecognized test type %r' % test_type)

  if test_type == constants.FULL_AU_TEST_TYPE:
    cmd.append('--archive_dir=%s' % archive_dir)
  # NOTE(review): an `else:` branch header appears elided here — the
  # --quick/--only_verify flags below presumably apply to non-full-AU runs.
    cmd.append('--quick')
    if test_type == constants.SMOKE_SUITE_TEST_TYPE:
      cmd.append('--only_verify')
      cmd.append('--suite=smoke')
    elif test_type == constants.TELEMETRY_SUITE_TEST_TYPE:
      cmd.append('--suite=telemetry_unit')

  if whitelist_chrome_crashes:
    cmd.append('--whitelist_chrome_crashes')

  result = cros_build_lib.RunCommand(cmd, cwd=cwd, error_code_ok=True)
  if result.returncode:
    if os.path.exists(results_dir_in_chroot):
      # Record the failing command alongside the results for later triage.
      error = '%s exited with code %d' % (' '.join(cmd), result.returncode)
      with open(results_dir_in_chroot + '/failed_test_command', 'w') as failed:
        # NOTE(review): the write of |error| into |failed| appears elided
        # from the listing.

    raise TestFailure('** VMTests failed with code %d **' % result.returncode)
def RunDevModeTest(buildroot, board, image_dir):
  """Runs the dev mode testing script to verify dev-mode scripts work."""
  crostestutils = os.path.join(buildroot, 'src', 'platform', 'crostestutils')
  image_path = os.path.join(image_dir, 'chromiumos_test_image.bin')
  test_script = 'devmode-test/devinstall_test.py'
  # NOTE(review): the final element of this list (presumably |image_path|)
  # and the closing bracket appear elided from the listing.
  cmd = [os.path.join(crostestutils, test_script), '--verbose', board,
  cros_build_lib.RunCommand(cmd)
def RunCrosVMTest(board, image_dir):
  """Runs cros_vm_test script to verify cros flash/deploy works."""
  image_path = os.path.join(image_dir, 'chromiumos_test_image.bin')
  test = cros_vm_test.CrosCommandTest(board, image_path)
  # NOTE(review): the call that actually executes |test| (e.g. test.Run())
  # appears elided from the listing.
def ListFailedTests(results_path):
  """Returns a list of failed tests.

  Parse the test report logs from autotest to find failed tests.

  Args:
    results_path: Path to the directory of test results.

  Returns:
    A lists of (test_name, relative/path/to/failed/tests)
  """
  # TODO: we don't have to parse the log to find failed tests once
  # crbug.com/350520 is fixed.
  # NOTE(review): a `reports = []` initializer appears elided before this
  # loop — |reports| is otherwise undefined in this listing.
  for path, _, filenames in os.walk(results_path):
    reports.extend([os.path.join(path, x) for x in filenames
                    if x == _TEST_REPORT_FILENAME])

  # NOTE(review): `failed_tests = []` and `processed_tests = []`
  # initializers appear elided here — both are used below.
  for report in reports:
    cros_build_lib.Info('Parsing test report %s', report)
    # Format used in the report:
    #   /path/to/base/dir/test_harness/all/SimpleTestUpdateAndVerify/ \
    #     2_autotest_tests/results-01-security_OpenSSLBlacklist [  FAILED  ]
    #   /path/to/base/dir/test_harness/all/SimpleTestUpdateAndVerify/ \
    #     2_autotest_tests/results-01-security_OpenSSLBlacklist/ \
    #     security_OpenBlacklist [  FAILED  ]
    with open(report) as f:
      failed_re = re.compile(r'([\./\w-]*)\s*\[\s*(\S+?)\s*\]')
      test_name_re = re.compile(r'results-[\d]+?-([\.\w_]*)')
      # NOTE(review): a `for line in f:` loop header appears elided here —
      # |line| is otherwise undefined.
      r = failed_re.search(line)
      if r and r.group(2) == _TEST_FAILED:
        # Process only failed tests.
        file_path = r.group(1)
        match = test_name_re.search(file_path)
        # NOTE(review): an `if match:` / `else:` pair appears elided around
        # the two assignments below.
        test_name = match.group(1)

          # If no match is found (due to format change or other
          # reasons), simply use the last component of file_path.
          test_name = os.path.basename(file_path)

        # A test may have subtests. We don't want to list all subtests.
        if test_name not in processed_tests:
          base_dirname = os.path.basename(results_path)
          # Get the relative path from the test_results directory. Note
          # that file_path is a chroot path, while results_path is a
          # non-chroot path, so we cannot use os.path.relpath directly.
          rel_path = file_path.split(base_dirname)[1].lstrip(os.path.sep)
          failed_tests.append((test_name, rel_path))
          processed_tests.append(test_name)

  # NOTE(review): a trailing `return failed_tests` appears elided from the
  # listing.
def GetTestResultsDir(buildroot, test_results_dir):
  """Return the outside-chroot path of a chroot test results directory.

  Args:
    buildroot: Root directory where build occurs.
    test_results_dir: Path from buildroot/chroot to find test results.
      This must a subdir of /tmp.
  """
  relative_results = test_results_dir.lstrip('/')
  return os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR,
                      relative_results)
def ArchiveTestResults(results_path, archive_dir):
  """Archives the test results to |archive_dir|.

  Args:
    results_path: Path to test results.
    archive_dir: Local directory to archive to.
  """
  # Make everything readable/writable first; results are written as root.
  # NOTE(review): the closing argument(s) of this SudoRunCommand call appear
  # elided from the listing.
  cros_build_lib.SudoRunCommand(['chmod', '-R', 'a+rw', results_path],
  if os.path.exists(archive_dir):
    osutils.RmDir(archive_dir)

  def _ShouldIgnore(dirname, file_list):
    # Note: We exclude VM disk and memory images. Instead, they are
    # archived via ArchiveVMFiles. Also skip any symlinks. gsutil
    # hangs on broken symlinks.
    return [x for x in file_list if
            x.startswith(constants.VM_DISK_PREFIX) or
            x.startswith(constants.VM_MEM_PREFIX) or
            os.path.islink(os.path.join(dirname, x))]

  shutil.copytree(results_path, archive_dir, symlinks=False,
                  ignore=_ShouldIgnore)
def BuildAndArchiveTestResultsTarball(src_dir, buildroot):
  """Create a compressed tarball of test results.

  Args:
    src_dir: The directory containing the test results.
    buildroot: Build root directory.

  Returns:
    The name of the tarball.
  """
  target = '%s.tgz' % src_dir.rstrip(os.path.sep)
  chroot = os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR)
  # NOTE(review): the closing argument(s) of CreateTarball (presumably
  # `chroot=chroot)`, which would explain the otherwise-unused |chroot|
  # variable) appear elided from the listing.
  cros_build_lib.CreateTarball(
      target, src_dir, compression=cros_build_lib.COMP_GZIP,
  return os.path.basename(target)
def ArchiveVMFiles(buildroot, test_results_dir, archive_path):
  """Archives the VM memory and disk images into tarballs.

  There may be multiple tests (e.g. SimpleTestUpdate and
  SimpleTestUpdateAndVerify), and multiple files for each test (one
  for the VM disk, and one for the VM memory). We create a separate
  tar file for each of these files, so that each can be downloaded
  independently.  (NOTE(review): the tail of this sentence is elided in the
  listing; wording assumed.)

  Args:
    buildroot: Build root directory.
    test_results_dir: Path from buildroot/chroot to find test results.
      This must a subdir of /tmp.
    archive_path: Directory the tarballs should be written to.

  Returns:
    The paths to the tarballs.
  """
  images_dir = os.path.join(buildroot, 'chroot', test_results_dir.lstrip('/'))
  # NOTE(review): an `images = []` initializer appears elided before this
  # loop — |images| is otherwise undefined in this listing.
  for path, _, filenames in os.walk(images_dir):
    images.extend([os.path.join(path, filename) for filename in
                   fnmatch.filter(filenames, constants.VM_DISK_PREFIX + '*')])
    images.extend([os.path.join(path, filename) for filename in
                   fnmatch.filter(filenames, constants.VM_MEM_PREFIX + '*')])

  # NOTE(review): a `tar_files = []` initializer appears elided here.
  for image_path in images:
    image_rel_path = os.path.relpath(image_path, images_dir)
    image_parent_dir = os.path.dirname(image_path)
    image_file = os.path.basename(image_path)
    tarball_path = os.path.join(archive_path,
                                "%s.tar" % image_rel_path.replace('/', '_'))
    # Note that tar will chdir to |image_parent_dir|, so that |image_file|
    # is at the top-level of the tar file.
    # NOTE(review): middle arguments of this CreateTarball call (presumably
    # cwd=image_parent_dir and inputs=[image_file]) appear elided.
    cros_build_lib.CreateTarball(tarball_path,
                                 compression=cros_build_lib.COMP_BZIP2,
    tar_files.append(tarball_path)

  # NOTE(review): a trailing `return tar_files` appears elided from the
  # listing.
@failures_lib.SetFailureType(SuiteTimedOut, timeout_util.TimeoutError)
def RunHWTestSuite(build, suite, board, pool=None, num=None, file_bugs=None,
                   wait_for_results=None, priority=None, timeout_mins=None,
                   retry=None, minimum_duts=0, debug=True):
  """Run the test suite in the Autotest lab.

  Args:
    build: The build is described as the bot_id and the build version.
      e.g. x86-mario-release/R18-1655.0.0-a1-b1584.
    suite: Name of the Autotest suite.
    board: The board the test suite should be scheduled against.
    pool: The pool of machines we should use to run the hw tests on.
    num: Maximum number of devices to use when scheduling tests in the
      suite.  (NOTE(review): continuation elided in the listing.)
    file_bugs: File bugs on test failures for this suite run.
    wait_for_results: If True, wait for autotest results before returning.
    priority: Priority of this suite run.
    timeout_mins: Timeout in minutes for the suite job and its sub-jobs.
    retry: If True, will enable job-level retry. Only works when
      wait_for_results is True.
    minimum_duts: The minimum number of DUTs should be available in lab for the
      suite job to be created. If it's set to 0, the check will be
      skipped.  (NOTE(review): continuation elided in the listing.)
    debug: Whether we are in debug mode.

  Raises:
    TestWarning, failures_lib.TestLabFailure, SuiteTimedOut, or TestFailure
    depending on run_suite's exit code (see table below).
  """
  # TODO(scottz): RPC client option names are misnomers crosbug.com/26445.
  # NOTE(review): additional list elements (RPC method name, --build and
  # --board flags) and the list close appear elided from the listing.
  cmd = [_AUTOTEST_RPC_CLIENT,
         _AUTOTEST_RPC_HOSTNAME,
         '--suite_name', suite,

  # Add optional arguments to command, if present.
  # NOTE(review): an `if pool is not None:` guard appears elided here.
  cmd += ['--pool', pool]

  # NOTE(review): an `if num is not None:` guard appears elided here.
  cmd += ['--num', str(num)]

  if file_bugs is not None:
    cmd += ['--file_bugs', str(file_bugs)]

  if wait_for_results is not None:
    cmd += ['--no_wait', str(not wait_for_results)]

  if priority is not None:
    cmd += ['--priority', priority]

  if timeout_mins is not None:
    cmd += ['--timeout_mins', str(timeout_mins)]

  if retry is not None:
    cmd += ['--retry', str(retry)]

  if minimum_duts != 0:
    cmd += ['--minimum_duts', str(minimum_duts)]

  # NOTE(review): an `if debug:` guard (with the real execution on the other
  # branch) appears elided around this dry-run logging.
  cros_build_lib.Info('RunHWTestSuite would run: %s',
                      cros_build_lib.CmdToStr(cmd))

  if timeout_mins is None:
    result = cros_build_lib.RunCommand(cmd, error_code_ok=True)
  # NOTE(review): an `else:` header appears elided before this with-block.
    # Pad the suite timeout so the lab-side timeout fires before ours.
    with timeout_util.Timeout(
        timeout_mins * 60 + constants.HWTEST_TIMEOUT_EXTENSION):
      result = cros_build_lib.RunCommand(cmd, error_code_ok=True)

  # run_suite error codes:
  #   0 - OK: Tests ran and passed.
  #   1 - ERROR: Tests ran and failed (or timed out).
  #   2 - WARNING: Tests ran and passed with warning(s). Note that 2
  #       may also be CLIENT_HTTP_CODE error returned by
  #       autotest_rpc_client.py. We ignore that case for now.
  #   3 - INFRA_FAILURE: Tests did not complete due to lab issues.
  #   4 - SUITE_TIMEOUT: Suite timed out. This could be caused by
  #       infrastructure failures or by test failures.
  # 11, 12, 13 for cases when rpc is down, see autotest_rpc_errors.py.
  lab_warning_codes = (2,)
  infra_error_codes = (3, 11, 12, 13)
  # NOTE(review): a `timeout_codes = (4,)` assignment appears elided —
  # |timeout_codes| is referenced below but not defined in this listing.

  if result.returncode in lab_warning_codes:
    raise TestWarning('** Suite passed with a warning code **')
  elif result.returncode in infra_error_codes:
    raise failures_lib.TestLabFailure(
        '** HWTest did not complete due to infrastructure issues '
        '(code %d) **' % result.returncode)
  elif result.returncode in timeout_codes:
    raise SuiteTimedOut('** Suite timed out before completion **')
  elif result.returncode != 0:
    raise TestFailure('** HWTest failed (code %d) **' % result.returncode)
def _GetAbortCQHWTestsURL(version, suite):
  """Get the URL where we should save state about the specified abort command.

  Args:
    version: The version of the current build. E.g. R18-1655.0.0-rc1
    suite: The suite argument that AbortCQHWTests was called with, if any.
  """
  return '%s/hwtests-aborted/%s/suite=%s' % (
      constants.MANIFEST_VERSIONS_GS_URL, version, suite)
def AbortCQHWTests(version, debug, suite=''):
  """Abort the specified hardware tests on the commit queue.

  Args:
    version: The version of the current build. E.g. R18-1655.0.0-rc1
    debug: Whether we are in debug mode.
    suite: Name of the Autotest suite. If empty, abort all suites.
  """
  # Mark the substr/suite as aborted in Google Storage.
  ctx = gs.GSContext(dry_run=debug)
  ctx.Copy('-', _GetAbortCQHWTestsURL(version, suite), input='')

  # Abort all jobs for the given version, containing the '-paladin' suffix.
  # Example job id: link-paladin/R35-5542.0.0-rc1
  substr = '%s/%s' % (cbuildbot_config.CONFIG_TYPE_PALADIN, version)

  # Actually abort the build.
  # NOTE(review): the remaining list elements (RPC method name, |substr|
  # and/or |suite|) and the closing bracket appear elided from the listing.
  cmd = [_AUTOTEST_RPC_CLIENT,
         _AUTOTEST_RPC_HOSTNAME,
  # NOTE(review): an `if debug:` guard appears elided before this dry-run
  # logging.
  cros_build_lib.Info('AbortCQHWTests would run: %s',
                      cros_build_lib.CmdToStr(cmd))
  # NOTE(review): `else:` and `try:` lines appear elided here — the `except`
  # below has no visible matching `try` in this listing.
    cros_build_lib.RunCommand(cmd)
  except cros_build_lib.RunCommandError:
    # Aborting is best-effort; log and continue rather than failing.
    cros_build_lib.Warning('AbortCQHWTests failed', exc_info=True)
def HaveCQHWTestsBeenAborted(version, suite=''):
  """Check in Google Storage whether the specified abort call was sent.

  This function will return True if the following call has occurred:
    AbortCQHWTests(version, debug=False, suite=suite)

  Args:
    version: The version of the current build. E.g. R18-1655.0.0-rc1
    suite: The suite argument that AbortCQHWTests was called with, if any.
  """
  abort_url = _GetAbortCQHWTestsURL(version, suite)
  return gs.GSContext().Exists(abort_url)
def GenerateStackTraces(buildroot, board, test_results_dir,
                        archive_dir, got_symbols):
  """Generates stack traces for logs in |gzipped_test_tarball|

  Args:
    buildroot: Root directory where build occurs.
    board: Name of the board being worked on.
    test_results_dir: Directory of the test results.
    archive_dir: Local directory for archiving.
    got_symbols: True if breakpad symbols have been generated.

  Returns:
    List of stack trace file names.
  """
  # NOTE(review): this excerpt elides several lines (the `continue` for
  # skipped minidumps, the asan-log branch introduction and file-read
  # loop) — verify against upstream before editing.
  stack_trace_filenames = []
  asan_log_signaled = False

  board_path = cros_build_lib.GetSysroot(board=board)
  symbol_dir = os.path.join(board_path, 'usr', 'lib', 'debug', 'breakpad')
  for curr_dir, _subdirs, files in os.walk(test_results_dir):
    for curr_file in files:
      full_file_path = os.path.join(curr_dir, curr_file)
      # Each input log/minidump gets a sibling ".txt" with the
      # symbolized output.
      processed_file_path = '%s.txt' % full_file_path

      # Distinguish whether the current file is a minidump or asan_log.
      if curr_file.endswith('.dmp'):
        # Skip crash files that were purposely generated or if
        # breakpad symbols are absent.
        if not got_symbols or curr_file.find('crasher_nobreakpad') == 0:
        # Process the minidump from within chroot.
        minidump = git.ReinterpretPathForChroot(full_file_path)
        cwd = os.path.join(buildroot, 'src', 'scripts')
        cros_build_lib.RunCommand(
            ['minidump_stackwalk', minidump, symbol_dir], cwd=cwd,
            enter_chroot=True, error_code_ok=True, redirect_stderr=True,
            debug_level=logging.DEBUG, log_stdout_to_file=processed_file_path)
        # Prepend '/chrome/$board' path to the stack trace in log.
        with open(full_file_path) as f:
            # Stack frame line example to be matched here:
            #    #0 0x721d1831 (/opt/google/chrome/chrome+0xb837831)
            stackline_match = re.search(r'^ *#[0-9]* 0x.* \(', line)
              frame_end = stackline_match.span()[1]
              line = line[:frame_end] + board_path + line[frame_end:]
          # Symbolize and demangle it.
          raw = cros_build_lib.RunCommand(
              ['asan_symbolize.py'], input=log_content, enter_chroot=True,
              debug_level=logging.DEBUG, capture_output=True,
              extra_env={'LLVM_SYMBOLIZER_PATH' : '/usr/bin/llvm-symbolizer'})
          cros_build_lib.RunCommand(['c++filt'],
                                    input=raw.output, debug_level=logging.DEBUG,
                                    cwd=buildroot, redirect_stderr=True,
                                    log_stdout_to_file=processed_file_path)
          # Break the bot if asan_log found. This is because some asan
          # crashes may not fail any test so the bot stays green.
          # Ex: crbug.com/167497
          if not asan_log_signaled:
            asan_log_signaled = True
            cros_build_lib.Error(
                'Asan crash occurred. See asan_logs in Artifacts.')
            cros_build_lib.PrintBuildbotStepFailure()

      # Append the processed file to archive.
      filename = ArchiveFile(processed_file_path, archive_dir)
      stack_trace_filenames.append(filename)

  return stack_trace_filenames
@failures_lib.SetFailureType(failures_lib.BuilderFailure)
def ArchiveFile(file_to_archive, archive_dir):
  """Archives the specified file.

  Args:
    file_to_archive: Full path to file to archive.
    archive_dir: Local directory for archiving.

  Returns:
    The base name of the archived file.
  """
  # NOTE(review): the guard on |archive_dir| and the trailing
  # `return filename` are elided in this excerpt — verify upstream.
  filename = os.path.basename(file_to_archive)
  archived_file = os.path.join(archive_dir, filename)
  shutil.copy(file_to_archive, archived_file)
  # World-readable so the archive server can serve the copy regardless
  # of the source file's permissions.
  os.chmod(archived_file, 0o644)
def MarkChromeAsStable(buildroot,
                       chrome_version=None):
  """Returns the portage atom for the revved chrome ebuild - see man emerge."""
  # NOTE(review): several signature parameters (tracking_branch, chrome_rev,
  # boards, ...) and the guards around the optional flags / else-branch are
  # elided in this excerpt — verify against upstream.
  cwd = os.path.join(buildroot, 'src', 'scripts')
  command = ['../../chromite/bin/cros_mark_chrome_as_stable',
             '--tracking_branch=%s' % tracking_branch]
  command.append('--boards=%s' % ':'.join(boards))
  command.append('--force_version=%s' % chrome_version)
  portage_atom_string = cros_build_lib.RunCommand(
      command + [chrome_rev],
      redirect_stdout=True,
      chroot_args=chroot_args,
      extra_env=extra_env).output.rstrip()
  if portage_atom_string:
    # Last line of output holds "...=<atom>"; everything before it is noise.
    chrome_atom = portage_atom_string.splitlines()[-1].partition('=')[-1]
    cros_build_lib.Info('Found nothing to rev.')
  for board in boards:
    # If we're using a version of Chrome other than the latest one, we need
    # to unmask it manually.
    if chrome_rev != constants.CHROME_REV_LATEST:
      keywords_file = CHROME_KEYWORDS_FILE % {'board': board}
      cros_build_lib.SudoRunCommand(
          ['mkdir', '-p', os.path.dirname(keywords_file)],
          enter_chroot=True, cwd=cwd)
      cros_build_lib.SudoRunCommand(
          ['tee', keywords_file], input='=%s\n' % chrome_atom,
          enter_chroot=True, cwd=cwd)

    # Sanity check: We should always be able to merge the version of
    # Chrome we just unmasked.
    result = cros_build_lib.RunCommand(
        ['emerge-%s' % board, '-p', '--quiet', '=%s' % chrome_atom],
        enter_chroot=True, error_code_ok=True, combine_stdout_stderr=True,
        capture_output=True)
    if result.returncode:
      cros_build_lib.PrintBuildbotStepWarnings()
      cros_build_lib.Warning('\n%s' % result.output)
      cros_build_lib.Warning('Cannot emerge-%s =%s\nIs Chrome pinned to an '
                             'older version?' % (board, chrome_atom))
def CleanupChromeKeywordsFile(boards, buildroot):
  """Remove each board's Chrome package.keywords uprev artifact, if present."""
  for board in boards:
    # Path as seen inside the chroot, then mapped to the host filesystem.
    in_chroot_path = CHROME_KEYWORDS_FILE % {'board': board}
    host_path = '%s/chroot%s' % (buildroot, in_chroot_path)
    if not os.path.exists(host_path):
      continue
    # The file is root-owned inside the chroot, so removal needs sudo.
    cros_build_lib.SudoRunCommand(['rm', '-f', host_path])
def UprevPackages(buildroot, boards, overlays, enter_chroot=True):
  """Uprevs non-browser chromium os packages that have changed."""
  # NOTE(review): the `if enter_chroot:` guard and the closing element of
  # the cmd list are elided in this excerpt — verify upstream.
  drop_file = _PACKAGE_FILE % {'buildroot': buildroot}
  # Paths must be re-expressed relative to the chroot when the script
  # runs inside it.
  overlays = [git.ReinterpretPathForChroot(x) for x in overlays]
  drop_file = git.ReinterpretPathForChroot(drop_file)
  cmd = ['cros_mark_as_stable', '--all',
         '--boards=%s' % ':'.join(boards),
         '--overlays=%s' % ':'.join(overlays),
         '--drop_file=%s' % drop_file,
  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=enter_chroot)
def UprevPush(buildroot, overlays, dryrun):
  """Pushes uprev changes to the main line."""
  # NOTE(review): the list closer, the `if dryrun:` guard, and the final
  # subcommand append are elided in this excerpt — verify upstream.
  cmd = ['cros_mark_as_stable',
         '--srcroot=%s' % os.path.join(buildroot, 'src'),
         '--overlays=%s' % ':'.join(overlays)
  cmd.append('--dryrun')
  RunBuildScript(buildroot, cmd, chromite_cmd=True)
def GenerateCPEExport(buildroot, board, useflags=None):
  """Generate CPE export.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine.
    useflags: A list of useflags for this build.

  Returns:
    A CommandResult object with the results of running the CPE
    export command.
  """
  # NOTE(review): the env-dict setup, useflags guard, and trailing
  # `return result` are elided in this excerpt — verify upstream.
  cmd = ['cros_extract_deps', '--format=cpe', '--board=%s' % board,
         'virtual/target-os']
  # Propagate the build's USE flags so the dependency graph matches.
  env['USE'] = ' '.join(useflags)
  result = RunBuildScript(buildroot, cmd, enter_chroot=True,
                          chromite_cmd=True, capture_output=True,
def GenerateBreakpadSymbols(buildroot, board, debug):
  """Generate breakpad symbols.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine.
    debug: Include extra debugging output.
  """
  # NOTE(review): the guard that appends a debug flag when |debug| is set
  # is elided in this excerpt — verify upstream.
  # We don't care about firmware symbols.
  # See http://crbug.com/213670.
  exclude_dirs = ['firmware']

  # Use half the CPUs (at least one) — symbol generation is memory heavy.
  cmd = ['cros_generate_breakpad_symbols', '--board=%s' % board,
         '--jobs=%s' % str(max([1, multiprocessing.cpu_count() / 2]))]
  cmd += ['--exclude-dir=%s' % x for x in exclude_dirs]
  RunBuildScript(buildroot, cmd, enter_chroot=True, chromite_cmd=True)
def GenerateDebugTarball(buildroot, board, archive_path, gdb_symbols):
  """Generates a debug tarball in the archive_dir.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine
    archive_path: Directory where tarball should be stored.
    gdb_symbols: Include *.debug files for debugging core files with gdb.

  Returns:
    The filename of the created debug tarball.
  """
  # NOTE(review): the branch selecting inputs/excludes when |gdb_symbols|
  # is set is elided in this excerpt — verify upstream.
  # Generate debug tarball. This needs to run as root because some of the
  # symbols are only readable by root.
  chroot = os.path.join(buildroot, 'chroot')
  board_dir = os.path.join(chroot, 'build', board, 'usr', 'lib')
  debug_tgz = os.path.join(archive_path, 'debug.tgz')
  extra_args = ['--exclude',
                os.path.join('debug', constants.AUTOTEST_BUILD_PATH),
                '--exclude', 'debug/tests']
  inputs = ['debug/breakpad']
  cros_build_lib.CreateTarball(
      debug_tgz, board_dir, sudo=True, compression=cros_build_lib.COMP_GZIP,
      chroot=chroot, inputs=inputs, extra_args=extra_args)

  # Fix permissions and ownership on debug tarball.
  cros_build_lib.SudoRunCommand(['chown', str(os.getuid()), debug_tgz])
  os.chmod(debug_tgz, 0o644)

  return os.path.basename(debug_tgz)
def GenerateHtmlIndex(index, files, url_base=None, head=None, tail=None):
  """Generate a simple index.html file given a set of filenames

  Args:
    index: The file to write the html index to.
    files: The list of files to create the index of. If a string, then it
           may be a path to a file (with one file per line), or a directory
           (which will be listed).
    url_base: The URL to prefix to all elements (otherwise they'll be relative).
    head: All the content before the listing. '<html><body>' if not specified.
    tail: All the content after the listing. '</body></html>' if not specified.
  """
  # NOTE(review): several lines (GenLink's empty-name guard, the `else:`
  # for file-list input, defaults guards, and the links/dot setup) are
  # elided in this excerpt — verify upstream.
  def GenLink(target, name=None):
    return ('<li><a href="%s%s">%s</a></li>'
            % (url_base, target, name if name else target))

  # |files| may be a directory to list or a newline-delimited manifest file.
  if isinstance(files, (unicode, str)):
    if os.path.isdir(files):
      files = os.listdir(files)
      files = osutils.ReadFile(files).splitlines()
  url_base = url_base + '/' if url_base else ''
  head = '<html><body>'
  html = head + '<ul>'
  # De-dup and sort entries for a stable listing.
  for a in sorted(set(files)):
    links.append(GenLink(*a))
  links.insert(0, GenLink(*dot_dot))
  links.insert(0, GenLink(*dot))
  html += '\n'.join(links)
  tail = '</body></html>'
  html += '</ul>' + tail
  osutils.WriteFile(index, html)
def AppendToFile(file_path, string):
  """Append |string| to the file at |file_path|.

  Appends are atomic as long as the string is smaller than PIPE_BUF
  (> 512 bytes); no atomicity guarantee is made beyond that size.

  Args:
    file_path: File to be appended to.
    string: String to append to the file.
  """
  # osutils handles open/close; 'a' mode gives the append semantics.
  osutils.WriteFile(file_path, string, mode='a')
def UpdateUploadedList(last_uploaded, archive_path, upload_urls,
  """Updates the archive's UPLOADED file, and uploads it to Google Storage.

  Args:
    last_uploaded: Filename of the last uploaded file.
    archive_path: Path to archive_dir.
    upload_urls: Iterable of GS locations where the UPLOADED file should be
      uploaded.
    debug: Whether we are in debug mode.
  """
  # NOTE(review): the signature continuation line (trailing parameters) is
  # elided in this excerpt — verify upstream.
  # Append to the uploaded list.
  filename = UPLOADED_LIST_FILENAME
  AppendToFile(os.path.join(archive_path, filename), last_uploaded + '\n')

  # Upload the updated list to Google Storage.
  UploadArchivedFile(archive_path, upload_urls, filename, debug,
@failures_lib.SetFailureType(failures_lib.GSUploadFailure)
def UploadArchivedFile(archive_path, upload_urls, filename, debug,
                       update_list=False, timeout=2 * 60 * 60, acl=None):
  """Upload the specified file from the archive dir to Google Storage.

  Args:
    archive_path: Path to archive dir.
    upload_urls: Iterable of GS locations where the UPLOADED file should be
      uploaded.
    debug: Whether we are in debug mode.
    filename: Filename of the file to upload.
    update_list: Flag to update the list of uploaded files.
    timeout: Raise an exception if the upload takes longer than this timeout.
    acl: Canned gsutil acl to use (e.g. 'public-read'), otherwise the internal
      (private) one is used.
  """
  # NOTE(review): the `try:` opener, the CopyInto continuation line, and
  # the `if update_list:` guard are elided in this excerpt — verify
  # upstream.
  local_path = os.path.join(archive_path, filename)
  gs_context = gs.GSContext(acl=acl, dry_run=debug)

  for upload_url in upload_urls:
    # Bound each individual upload with the caller-supplied timeout.
    with timeout_util.Timeout(timeout):
      gs_context.CopyInto(local_path, upload_url, parallel=True,
  except timeout_util.TimeoutError:
    # Re-raise with the filename so the failure is attributable.
    raise timeout_util.TimeoutError('Timed out uploading %s' % filename)

  # Update the list of uploaded files.
  UpdateUploadedList(filename, archive_path, upload_urls, debug)
def UploadSymbols(buildroot, board, official, cnt, failed_list):
  """Upload debug symbols for this build."""
  # NOTE(review): guards around the optional flags and around the
  # non-zero return value are elided in this excerpt — verify upstream.
  # log_cmd exists only for logging; the real work happens via the
  # upload_symbols library call below.
  log_cmd = ['upload_symbols', '--board', board]
  if failed_list is not None:
    log_cmd += ['--failed-list', str(failed_list)]
  log_cmd.append('--official_build')
  log_cmd += ['--upload-limit', str(cnt)]
  cros_build_lib.Info('Running: %s' % cros_build_lib.CmdToStr(log_cmd))

  ret = upload_symbols.UploadSymbols(
      board=board, official=official, upload_limit=cnt,
      root=os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR),
      failed_list=failed_list)
  # TODO(davidjames): Convert this to a fatal error.
  # See http://crbug.com/212437
  cros_build_lib.PrintBuildbotStepWarnings()
def PushImages(board, archive_url, dryrun, profile, sign_types=()):
  """Push the generated image to the release bucket for signing."""
  # NOTE(review): guards around the optional log flags and the `try:`
  # surrounding PushImage are elided in this excerpt — verify upstream.
  # Log the equivalent command for debugging purposes.
  log_cmd = ['pushimage', '--board=%s' % board]
  log_cmd.append('-n')
  log_cmd.append('--profile=%s' % profile)
  log_cmd.append('--sign-types=%s' % ' '.join(sign_types))
  log_cmd.append(archive_url)
  cros_build_lib.Info('Running: %s' % cros_build_lib.CmdToStr(log_cmd))

  return pushimage.PushImage(archive_url, board, profile=profile,
                             sign_types=sign_types, dry_run=dryrun)
  except pushimage.PushError as e:
    # Flag the buildbot step so the failure is visible on the waterfall.
    cros_build_lib.PrintBuildbotStepFailure()
def BuildFactoryInstallImage(buildroot, board, extra_env):
  """Build a factory install image.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine
    extra_env: Flags to be added to the environment for the new process.

  Returns:
    The basename of the symlink created for the image.
  """
  # NOTE(review): the tail of the cmd list, the RunBuildScript call
  # continuation, and the trailing `return alias` are elided in this
  # excerpt — verify upstream.

  # We use build_attempt=3 here to ensure that this image uses a different
  # output directory from our regular image and the factory test image.
  alias = _FACTORY_SHIM
  cmd = ['./build_image',
         '--board=%s' % board,
         '--symlink=%s' % alias,
         '--build_attempt=3',
  RunBuildScript(buildroot, cmd, extra_env=extra_env, capture_output=True,
def MakeNetboot(buildroot, board, image_dir):
  """Build a netboot image.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    image_dir: Directory containing factory install shim.
  """
  # The script runs inside the chroot, so the image path must be
  # re-expressed relative to it.
  chroot_image_dir = git.ReinterpretPathForChroot(image_dir)
  cmd = [
      './make_netboot.sh',
      '--board=%s' % board,
      '--image_dir=%s' % chroot_image_dir,
  ]
  RunBuildScript(buildroot, cmd, capture_output=True, enter_chroot=True)
def MakeFactoryToolkit(buildroot, board, output_dir, version=None):
  """Build a factory toolkit.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    output_dir: Directory for the resulting factory toolkit.
    version: Version string to be included in ID string.
  """
  # Output path must be expressed relative to the chroot the script runs in.
  chroot_output_dir = git.ReinterpretPathForChroot(output_dir)
  cmd = ['./make_factory_toolkit.sh',
         '--board=%s' % board,
         '--output_dir=%s' % chroot_output_dir]
  if version is not None:
    cmd += ['--version', version]
  RunBuildScript(buildroot, cmd, capture_output=True, enter_chroot=True)
def BuildRecoveryImage(buildroot, board, image_dir, extra_env):
  """Build a recovery image.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    image_dir: Directory containing base image.
    extra_env: Flags to be added to the environment for the new process.
  """
  # NOTE(review): the continuation of the RunBuildScript call (its final
  # keyword arguments) is elided in this excerpt — verify upstream.
  image = os.path.join(image_dir, constants.BASE_IMAGE_BIN)
  cmd = ['./mod_image_for_recovery.sh',
         '--board=%s' % board,
         '--image=%s' % git.ReinterpretPathForChroot(image)]
  RunBuildScript(buildroot, cmd, extra_env=extra_env, capture_output=True,
def BuildTarball(buildroot, input_list, tarball_output, cwd=None,
                 compressed=True, **kwargs):
  """Tars and zips files and directories from input_list to tarball_output.

  Args:
    buildroot: Root directory where build occurs.
    input_list: A list of files and directories to be archived.
    tarball_output: Path of output tar archive file.
    cwd: Current working directory when tar command is executed.
    compressed: Whether or not the tarball should be compressed with pbzip2.
    **kwargs: Keyword arguments to pass to CreateTarball.

  Returns:
    Return value of cros_build_lib.CreateTarball.
  """
  # NOTE(review): the `if compressed:` guard between these two assignments
  # is elided in this excerpt — verify upstream.
  compressor = cros_build_lib.COMP_NONE
  compressor = cros_build_lib.COMP_BZIP2
  chroot = os.path.join(buildroot, 'chroot')
  return cros_build_lib.CreateTarball(
      tarball_output, cwd, compression=compressor, chroot=chroot,
      inputs=input_list, **kwargs)
def FindFilesWithPattern(pattern, target='./', cwd=os.curdir):
  """Search the root directory recursively for matching filenames.

  Args:
    pattern: the pattern used to match the filenames.
    target: the target directory to search.
    cwd: current working directory.

  Returns:
    A list of paths of the matched files.
  """
  # NOTE(review): the os.chdir(cwd) call, the `matches = []` initializer,
  # and the chdir-restore / return lines are elided in this excerpt —
  # verify upstream.
  # Backup the current working directory before changing it
  old_cwd = os.getcwd()
  for target, _, filenames in os.walk(target):
    for filename in fnmatch.filter(filenames, pattern):
      matches.append(os.path.join(target, filename))

  # Restore the working directory
def BuildAUTestTarball(buildroot, board, work_dir, version, archive_url):
  """Tar up the au test artifacts into the tarball_dir.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    work_dir: Location for doing work.
    version: Basic version of the build i.e. 3289.23.0.
    archive_url: GS directory where we uploaded payloads.
  """
  # NOTE(review): the `env_dict = dict(` opener and the `else:` before the
  # second run_env assignment are elided in this excerpt — verify upstream.
  au_test_tarball = os.path.join(work_dir, 'au_control.tar.bz2')

  cwd = os.path.join(buildroot, 'src', 'third_party', 'autotest', 'files')
  control_files_subdir = os.path.join('autotest', 'au_control_files')

  autotest_dir = os.path.join(work_dir, control_files_subdir)
  os.makedirs(autotest_dir)

  # Get basic version without R*.
  basic_version = re.search('R[0-9]+-([0-9][\w.]+)', version).group(1)

  # Pass in the python paths to the libs full release test needs.
      chromite_path=buildroot,
      devserver_path=os.path.join(buildroot, 'src', 'platform', 'dev'))

  python_path = '%(chromite_path)s:%(devserver_path)s' % env_dict
  cmd = ['site_utils/autoupdate/full_release_test.py',
         '--npo', '--nmo', '--dump',
         '--dump_dir', autotest_dir, '--archive_url', archive_url,
         basic_version, board, '--log=debug']

  # Make sure gsutil is reachable on PATH for the release test script.
  gs_context_dir = os.path.dirname(gs.GSContext.GetDefaultGSUtilBin())
  if not gs_context_dir in os.environ['PATH']:
    run_env = os.environ.copy()
    run_env['PATH'] += ':%s' % gs_context_dir
    run_env = os.environ

  run_env.setdefault('PYTHONPATH', '')
  run_env['PYTHONPATH'] += ':%s' % python_path

  cros_build_lib.RunCommand(cmd, env=run_env, cwd=cwd)
  BuildTarball(buildroot, [control_files_subdir], au_test_tarball, cwd=work_dir)
  return au_test_tarball
def BuildFullAutotestTarball(buildroot, board, tarball_dir):
  """Tar up the full autotest directory into image_dir.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    tarball_dir: Location for storing autotest tarballs.

  Returns:
    A tuple the path of the full autotest tarball.
  """
  # NOTE(review): the continuation of the BuildTarball call and the final
  # return statement are elided in this excerpt — verify upstream.
  tarball = os.path.join(tarball_dir, 'autotest.tar.bz2')
  cwd = os.path.abspath(os.path.join(buildroot, 'chroot', 'build', board,
                                     constants.AUTOTEST_BUILD_PATH, '..'))
  result = BuildTarball(buildroot, ['autotest'], tarball, cwd=cwd,

  # Emerging the autotest package to the factory test image while this is
  # running modifies the timestamp on /build/autotest/server by
  # adding a tmp directory underneath it.
  # When tar spots this, it flags this and returns
  # status code 1. The tarball is still OK, although there might be a few
  # unneeded (and garbled) tmp files. If tar fails in a different way, it'll
  # return an error code other than 1.
  # TODO: Fix the autotest ebuild. See http://crbug.com/237537
  if result.returncode not in (0, 1):
    raise Exception('Autotest tarball creation failed with exit code %s'
                    % (result.returncode))
def BuildImageZip(archive_dir, image_dir):
  """Build image.zip in archive_dir from contents of image_dir.

  Exclude the dev image from the zipfile.

  Args:
    archive_dir: Directory to store image.zip.
    image_dir: Directory to zip up.

  Returns:
    The basename of the zipfile.
  """
  # NOTE(review): the trailing `return filename` is elided in this
  # excerpt — verify upstream.
  filename = 'image.zip'
  zipfile = os.path.join(archive_dir, filename)
  cros_build_lib.RunCommand(['zip', zipfile, '-r', '.'], cwd=image_dir,
                            capture_output=True)
def BuildStandaloneArchive(archive_dir, image_dir, artifact_info):
  """Create a compressed archive from the specified image information.

  The artifact info is derived from a JSON file in the board overlay. It
  should be in the following format:

  Each artifact can contain the following keys:
  input - Required. A list of paths and globs that expands to
      the list of files to archive.
  output - the name of the archive to be created. If omitted,
      it will default to the first filename, stripped of
      extensions, plus the appropriate .tar.gz or other suffix.
  archive - "tar" or "zip". If omitted, files will be uploaded
      directly, without being archived together.
  compress - a value cros_build_lib.CompressionStrToType knows about. Only
      useful for tar. If omitted, an uncompressed tar will be created.

  Args:
    archive_dir: Directory to store image zip.
    image_dir: Base path for all inputs.
    artifact_info: Extended archive configuration dictionary containing:
      - paths - required, list of files to archive.
      - output, archive & compress entries from the JSON file.

  Returns:
    The base name of the archive.

  Raises:
    A ValueError if the compression or archive values are unknown.
    A KeyError is a required field is missing from artifact_info.
  """
  # NOTE(review): the final `else:` before the unknown-archive raise and
  # the trailing return are elided in this excerpt — verify upstream.
  if 'archive' not in artifact_info:
    # Nothing to do, just return the list as-is.
    return artifact_info['paths']

  inputs = artifact_info['paths']
  archive = artifact_info['archive']
  compress = artifact_info.get('compress')
  compress_type = cros_build_lib.CompressionStrToType(compress)
  if compress_type is None:
    raise ValueError('unknown compression type: %s' % compress)

  # If the output is fixed, use that. Otherwise, construct it
  # from the name of the first archived file, stripping extensions.
  filename = artifact_info.get(
      'output', '%s.%s' % (os.path.splitext(inputs[0])[0], archive))
  if archive == 'tar':
    # Add the .compress extension if we don't have a fixed name.
    if 'output' not in artifact_info and compress:
      filename = "%s.%s" % (filename, compress)
    # XZ_OPT=-1 keeps xz fast; archive size is secondary here.
    extra_env = { 'XZ_OPT' : '-1' }
    cros_build_lib.CreateTarball(
        os.path.join(archive_dir, filename), image_dir,
        inputs=inputs, compression=compress_type, extra_env=extra_env)
  elif archive == 'zip':
    cros_build_lib.RunCommand(
        ['zip', os.path.join(archive_dir, filename), '-r'] + inputs,
        cwd=image_dir, capture_output=True)
    raise ValueError('unknown archive type: %s' % archive)
def BuildFirmwareArchive(buildroot, board, archive_dir):
  """Build firmware_from_source.tar.bz2 in archive_dir from build root.

  Args:
    buildroot: Root directory where build occurs.
    board: Board name of build target.
    archive_dir: Directory to store output file.

  Returns:
    The basename of the archived file, or None if the target board does
    not have firmware from source.
  """
  # NOTE(review): the empty-source-list early return and the trailing
  # `return archive_name` are elided in this excerpt — verify upstream.
  firmware_root = os.path.join(buildroot, 'chroot', 'build', board, 'firmware')
  # Tar inputs must be relative to the cwd handed to BuildTarball below.
  source_list = [os.path.relpath(f, firmware_root)
                 for f in glob.iglob(os.path.join(firmware_root, '*'))]

  archive_name = 'firmware_from_source.tar.bz2'
  archive_file = os.path.join(archive_dir, archive_name)
  BuildTarball(buildroot, source_list, archive_file, cwd=firmware_root)
def BuildFactoryZip(buildroot, board, archive_dir, factory_shim_dir,
                    factory_toolkit_dir, version=None):
  """Build factory_image.zip in archive_dir.

  Args:
    buildroot: Root directory where build occurs.
    board: Board name of build target.
    archive_dir: Directory to store factory_image.zip.
    factory_shim_dir: Directory containing factory shim.
    factory_toolkit_dir: Directory containing factory toolkit.
    version: The version string to be included in the factory image.zip.

  Returns:
    The basename of the zipfile.
  """
  # NOTE(review): the `rules = {` opener, the loop's `continue`, and the
  # trailing `return filename` are elided in this excerpt — verify
  # upstream.
  filename = 'factory_image.zip'

  # Creates a staging temporary folder.
  temp_dir = tempfile.mkdtemp(prefix='cbuildbot_factory')

  zipfile = os.path.join(archive_dir, filename)
  cmd = ['zip', '-r', zipfile, '.']

  # Rules for archive: { folder: pattern }
      ['*factory_install*.bin', '*partition*', os.path.join('netboot', '*')],
    factory_toolkit_dir:
      ['*factory_image*.bin', '*partition*', 'install_factory_toolkit.run'],
  for folder, patterns in rules.items():
    if not folder or not os.path.exists(folder):
    # Symlink each source folder into the staging dir so a single zip
    # invocation can pick up files from all of them.
    basename = os.path.basename(folder)
    target = os.path.join(temp_dir, basename)
    os.symlink(folder, target)
    for pattern in patterns:
      cmd.extend(['--include', os.path.join(basename, pattern)])

  # Everything in /usr/local/factory/bundle gets overlaid into the
  bundle_src_dir = os.path.join(
      buildroot, 'chroot', 'build', board, 'usr', 'local', 'factory', 'bundle')
  if os.path.exists(bundle_src_dir):
    for f in os.listdir(bundle_src_dir):
      src_path = os.path.join(bundle_src_dir, f)
      os.symlink(src_path, os.path.join(temp_dir, f))
      cmd.extend(['--include',
                  f if os.path.isfile(src_path) else
                  os.path.join(f, '*')])

  # Add a version file in the zip file.
  if version is not None:
    version_file = os.path.join(temp_dir, 'BUILD_VERSION')
    osutils.WriteFile(version_file, version)
    cmd.extend(['--include', version_file])

  cros_build_lib.RunCommand(cmd, cwd=temp_dir, capture_output=True)
  osutils.RmDir(temp_dir)
def ArchiveHWQual(buildroot, hwqual_name, archive_dir, image_dir):
  """Create a hwqual tarball in archive_dir.

  Args:
    buildroot: Root directory where build occurs.
    hwqual_name: Name for tarball.
    archive_dir: Local directory for hwqual tarball.
    image_dir: Directory containing test image.

  Returns:
    The basename of the created hwqual tarball.
  """
  archive_script = os.path.join(buildroot, 'src', 'scripts', 'archive_hwqual')
  cros_build_lib.RunCommand(
      [archive_script,
       '--from', archive_dir,
       '--image_dir', image_dir,
       '--output_tag', hwqual_name],
      capture_output=True)
  return '%s.tar.bz2' % hwqual_name
def CreateTestRoot(build_root):
  """Returns a temporary directory for test results in chroot.

  Returns:
    The path inside the chroot rather than whole path.
  """
  # Make a fresh results dir under <build_root>/chroot/tmp.
  chroot = os.path.join(build_root, 'chroot')
  results_dir = tempfile.mkdtemp(
      prefix='cbuildbot', dir=os.path.join(chroot, 'tmp'))
  # Re-express the path as the chroot sees it (e.g. '/tmp/cbuildbotXXXX').
  return os.path.sep + os.path.relpath(results_dir, start=chroot)
def GeneratePayloads(build_root, target_image_path, archive_dir):
  """Generates the payloads for hw testing.

  Args:
    build_root: The root of the chromium os checkout.
    target_image_path: The path to the image to generate payloads to.
    archive_dir: Where to store payloads we generated.
  """
  # NOTE(review): the definitions of the payload-name `prefix`/`suffix`
  # and the `cmd = [` list openers/closers are elided in this excerpt —
  # verify upstream.
  real_target = os.path.realpath(target_image_path)
  # The path to the target should look something like this:
  # .../link/R37-5952.0.2014_06_12_2302-a1/chromiumos_test_image.bin
  board, os_version = real_target.split('/')[-3:-1]

  cwd = os.path.join(build_root, 'src', 'scripts')
  path = git.ReinterpretPathForChroot(
      os.path.join(build_root, 'src', 'platform', 'dev', 'host'))
  chroot_dir = os.path.join(build_root, 'chroot')
  chroot_tmp = os.path.join(chroot_dir, 'tmp')
  chroot_target = git.ReinterpretPathForChroot(target_image_path)

  # Work under chroot/tmp so the in-chroot tools can write the outputs.
  with osutils.TempDir(base_dir=chroot_tmp,
                       prefix='generate_payloads') as temp_dir:
    chroot_temp_dir = temp_dir.replace(chroot_dir, '', 1)

        os.path.join(path, 'cros_generate_update_payload'),
        '--image', chroot_target,
        '--output', os.path.join(chroot_temp_dir, 'update.gz')
    cros_build_lib.RunCommand(cmd, enter_chroot=True, cwd=cwd)
    name = '_'.join([prefix, os_version, board, 'full', suffix])
    # Names for full payloads look something like this:
    # chromeos_R37-5952.0.2014_06_12_2302-a1_link_full_dev.bin
    shutil.move(os.path.join(temp_dir, 'update.gz'),
                os.path.join(archive_dir, name))

    # Reuse the same command with --src_image to produce a delta payload.
    cmd.extend(['--src_image', chroot_target])
    cros_build_lib.RunCommand(cmd, enter_chroot=True, cwd=cwd)
    # Names for delta payloads look something like this:
    # chromeos_R37-5952.0.2014_06_12_2302-a1_R37-
    # 5952.0.2014_06_12_2302-a1_link_delta_dev.bin
    name = '_'.join([prefix, os_version, os_version, board, 'delta', suffix])
    shutil.move(os.path.join(temp_dir, 'update.gz'),
                os.path.join(archive_dir, name))

        os.path.join(path, 'cros_generate_stateful_update_payload'),
        '--image', chroot_target,
        '--output', chroot_temp_dir
    cros_build_lib.RunCommand(cmd, enter_chroot=True, cwd=cwd)
    shutil.move(os.path.join(temp_dir, STATEFUL_FILE),
                os.path.join(archive_dir, STATEFUL_FILE))
def GetChromeLKGM(revision):
  """Returns the ChromeOS LKGM from Chrome given the git revision."""
  if not revision:
    revision = 'refs/heads/master'
  # Gitiles serves file contents base64-encoded when format=text is used.
  url_path = '%s/+/%s/%s?format=text' % (
      constants.CHROMIUM_SRC_PROJECT, revision, constants.PATH_TO_CHROME_LKGM)
  response = gob_util.FetchUrl(constants.EXTERNAL_GOB_HOST, url_path)
  return base64.b64decode(response.read()).strip()
def SyncChrome(build_root, chrome_root, useflags, tag=None, revision=None):
  """Sync Chrome sources into |chrome_root| via the sync_chrome helper.

  Args:
    build_root: The root of the chromium os checkout.
    chrome_root: The directory where chrome is stored.
    useflags: Array of use flags.
    tag: If supplied, the Chrome tag to sync.
    revision: If supplied, the Chrome revision to sync.
  """
  # --reset tells sync_chrome to blow away local changes and to feel
  # free to delete any directories that get in the way of syncing. This
  # is needed for unattended operation.
  sync_chrome = os.path.join(build_root, 'chromite', 'bin', 'sync_chrome')
  cmd = [sync_chrome, '--reset']
  if constants.USE_CHROME_INTERNAL in useflags:
    cmd.append('--internal')
  if tag is not None:
    cmd += ['--tag', tag]
  if revision is not None:
    cmd += ['--revision', revision]
  cmd.append(chrome_root)
  # Syncing is flaky against remote servers, so retry a bounded number
  # of times.
  retry_util.RunCommandWithRetries(constants.SYNC_RETRIES, cmd, cwd=build_root)
def PatchChrome(chrome_root, patch, subdir):
  """Apply a patch to Chrome.

  Args:
    chrome_root: The directory where chrome is stored.
    patch: Rietveld issue number to apply.
    subdir: Subdirectory to apply patch in.
  """
  patch_dir = os.path.join(chrome_root, subdir)
  cros_build_lib.RunCommand(['apply_issue', '-i', patch], cwd=patch_dir)
class ChromeSDK(object):
  """Wrapper for the 'cros chrome-sdk' command."""

  # Targets built when Ninja() is called without an explicit list.
  DEFAULT_TARGETS = ('chrome', 'chrome_sandbox', 'nacl_helper',)
  # Higher default parallelism when goma distributes the compile.
  DEFAULT_JOBS_GOMA = 500

  def __init__(self, cwd, board, extra_args=None, chrome_src=None, goma=False,
               debug_log=True, cache_dir=None, target_tc=None,
               toolchain_url=None):
    """Initialize the chrome-sdk wrapper.

    Args:
      cwd: Where to invoke 'cros chrome-sdk'.
      board: The board to run chrome-sdk for.
      extra_args: Extra args to pass in on the command line.
      chrome_src: Path to pass in with --chrome-src.
      goma: If True, run using goma.
      debug_log: If set, run with debug log-level.
      cache_dir: Specify non-default cache directory.
      target_tc: Override target toolchain.
      toolchain_url: Override toolchain url pattern.
    """
    # NOTE(review): several attribute assignments (self.cwd, self.board,
    # self.goma) and the guards around the optional flags are elided in
    # this excerpt — verify upstream.
    self.extra_args = extra_args or []
      self.extra_args += ['--chrome-src', chrome_src]
      self.extra_args.append('--nogoma')
    self.debug_log = debug_log
    self.cache_dir = cache_dir
    self.target_tc = target_tc
    self.toolchain_url = toolchain_url

  def Run(self, cmd, extra_args=None):
    """Run a command inside the chrome-sdk context."""
    # NOTE(review): the cros_cmd initializer and the guards selecting the
    # optional flags below are elided in this excerpt — verify upstream.
      cros_cmd += ['--log-level', 'debug']
      cros_cmd += ['--cache-dir', self.cache_dir]
      self.extra_args += ['--target-tc', self.target_tc]
    if self.toolchain_url:
      self.extra_args += ['--toolchain-url', self.toolchain_url]
    cros_cmd += ['chrome-sdk', '--board', self.board] + self.extra_args
    # '--' separates chrome-sdk's own options from the wrapped command.
    cros_cmd += (extra_args or []) + ['--'] + cmd
    cros_build_lib.RunCommand(cros_cmd, cwd=self.cwd)

  def Ninja(self, jobs=None, debug=False, targets=DEFAULT_TARGETS):
    """Run 'ninja' inside a chrome-sdk context.

    Args:
      jobs: The number of -j jobs to run.
      debug: Whether to do a Debug build (defaults to Release).
      targets: The targets to compile.
    """
    # NOTE(review): the `if jobs is None:` guard and the DEFAULT_JOBS
    # constant it references are elided in this excerpt — verify upstream.
    jobs = self.DEFAULT_JOBS_GOMA if self.goma else self.DEFAULT_JOBS
    flavor = 'Debug' if debug else 'Release'
    cmd = ['ninja', '-C', 'out_%s/%s' % (self.board, flavor) , '-j', str(jobs)]
    self.Run(cmd + list(targets))