1 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """PayGen - Automatic Payload Generation.
7 This library processes a single build at a time, and decides which payloads
8 need to be generated. It then calls paygen_payload to generate each payload.
This library is responsible for locking builds during processing, and checking
11 and setting flags to show that a build has been processed.
14 from __future__ import print_function
26 fixup_path.FixupPath()
28 from chromite.cbuildbot import commands
29 from chromite.cbuildbot import cbuildbot_config
30 from chromite.lib import cros_build_lib
31 from chromite.lib import parallel
32 from chromite.lib import retry_util
33 from chromite.lib.paygen import download_cache
34 from chromite.lib.paygen import dryrun_lib
35 from chromite.lib.paygen import gslib
36 from chromite.lib.paygen import gslock
37 from chromite.lib.paygen import gspaths
38 from chromite.lib.paygen import paygen_payload_lib
39 from chromite.lib.paygen import urilib
40 from chromite.lib.paygen import utils
# If we are an external-only checkout, or a bootstrap environment, these imports
43 # will fail. We quietly ignore the failure, but leave bombs around that will
44 # explode if people try to really use this library.
46 from crostools.config import config
47 from crostools.omaha import query
49 # pylint: disable-msg=F0401
50 from site_utils.autoupdate.lib import test_params
51 from site_utils.autoupdate.lib import test_control
52 # pylint: enable-msg=F0401
# The oldest release milestone (e.g. the "30" of "R30-1234.0.0") for which
# scheduling the payload test suite via run_suite should be attempted.
RUN_SUITE_MIN_MSTONE = 30

# strftime() format used to timestamp archived paygen.log file names in GS.
PAYGEN_LOG_TIMESTAMP_FORMAT = '%Y%m%d-%H%M%S-UTC'
class Error(Exception):
  """Exception base class for this module."""


class EarlyExit(Error):
  """Base class for paygen_build 'normal' errors.

  There are a number of cases in which a paygen run fails for reasons that
  require special reporting, but which are normal enough to avoid raising
  big alarms. We signal these results using exceptions derived from this
  class.

  Note that the docs strings on the subclasses may be displayed directly
  to the user, and RESULT may be returned as an exit code.
  """

  # NOTE(review): subclasses may also define a RESULT class attribute (an
  # exit code, per the docstring above); those lines are not visible in this
  # chunk of the file — confirm against the full file.

  def __str__(self):
    """Return the doc string to the user as the exception description."""
    return self.__doc__


class BuildFinished(EarlyExit):
  """This build has already been marked as finished, no need to process."""


class BuildLocked(EarlyExit):
  """This build is locked and already being processed elsewhere."""


class BuildSkip(EarlyExit):
  """This build has been marked as skip, and should not be processed."""


class BuildNotReady(EarlyExit):
  """Not all images for this build are uploaded, don't process it yet."""


class BoardNotConfigured(EarlyExit):
  """The board does not exist in the crostools release config."""


class BuildCorrupt(Error):
  """Exception raised if a build has unexpected images."""


class ImageMissing(Error):
  """Exception raised if a build doesn't have expected images."""


class PayloadTestError(Error):
  """Raised when an error is encountered with generation of test artifacts."""


class ArchiveError(Error):
  """Raised when there was a failure to map a build to the images archive."""
129 def _LogList(title, obj_list):
130 """Helper for logging a list of objects.
139 title: Title string for the list.
140 obj_list: List of objects to convert to string and log, one per line.
142 logging.info('%s:', title)
145 logging.info(' (no objects listed)')
152 logging.info(' %2d: %s', index, obj)
def _FilterForImages(artifacts):
  """Return only instances of Image from a list of artifacts."""
  images = []
  for artifact in artifacts:
    if isinstance(artifact, gspaths.Image):
      images.append(artifact)
  return images
def _FilterForMp(artifacts):
  """Return the MP keyed images in a list of artifacts.

  This returns all images with key names of the form "mp", "mp-v3", etc.

  Args:
    artifacts: The list of artifacts to filter.

  Returns:
    List of MP images.
  """
  return [image for image in _FilterForImages(artifacts)
          if image.key.startswith('mp')]
def _FilterForPremp(artifacts):
  """Return the PreMp keyed images in a list of artifacts.

  The key for an image is expected to be of the form "premp", "mp", or
  "mp-vX". This filter returns everything that is "premp".

  Args:
    artifacts: The list of artifacts to filter.

  Returns:
    List of PreMP images.
  """
  return [image for image in _FilterForImages(artifacts)
          if image.key == 'premp']
def _FilterForBasic(artifacts):
  """Return the basic (not NPO) images in a list of artifacts.

  As an example, an image for a stable channel build might be in the
  "stable-channel", or it might be in the "npo-channel". This only returns
  the basic images that match "stable-channel".

  Args:
    artifacts: The list of artifacts to filter.

  Returns:
    List of basic images.
  """
  return [image for image in _FilterForImages(artifacts)
          if image.image_channel is None]
def _FilterForNpo(artifacts):
  """Return the NPO images in a list of artifacts.

  Return the N Plus One images in the given list.

  Args:
    artifacts: The list of artifacts to filter.

  Returns:
    List of NPO images.
  """
  return [image for image in _FilterForImages(artifacts)
          if image.image_channel == 'nplusone-channel']
def _FilterForUnsignedImageArchives(artifacts):
  """Return only instances of UnsignedImageArchive from a list of artifacts."""
  archives = []
  for artifact in artifacts:
    if isinstance(artifact, gspaths.UnsignedImageArchive):
      archives.append(artifact)
  return archives
def _FilterForTest(artifacts):
  """Return only test image archives from a list of artifacts."""
  return [archive for archive in _FilterForUnsignedImageArchives(artifacts)
          if archive.image_type == 'test']
def _GenerateSinglePayload(payload, work_dir, sign, dry_run):
  """Generate a single payload.

  This is intended to be safe to call inside a new process.

  Args:
    payload: gspath.Payload object defining the payloads to generate.
    work_dir: Working directory for payload generation.
    sign: boolean to decide if payload should be signed.
    dry_run: boolean saying if this is a dry run.
  """
  # This cache dir will be shared with other processes, but we need our
  # own instance of the cache manager to properly coordinate.
  cache_dir = paygen_payload_lib.FindCacheDir(work_dir)
  with download_cache.DownloadCache(
      cache_dir, cache_size=_PaygenBuild.CACHE_SIZE) as cache:
    # Actually generate the payload.
    # NOTE(review): the argument list of this call is not visible in this
    # chunk of the file — confirm against the full file.
    paygen_payload_lib.CreateAndUploadPayload(
class _PaygenBuild(object):
  """This class is responsible for generating the payloads for a given build.

  It operates across a single build at a time, and is responsible for locking
  that build and for flagging it as finished when all payloads are generated.
  """

  # 5 Gig bytes of cache.
  CACHE_SIZE = 5 * 1024 * 1024 * 1024

  # Relative subpath for dumping control files inside the temp directory.
  CONTROL_FILE_SUBDIR = os.path.join('autotest', 'au_control_files')

  # The name of the suite of paygen-generated Autotest tests.
  PAYGEN_AU_SUITE_TEMPLATE = 'paygen_au_%s'

  # Name of the Autotest control file tarball.
  CONTROL_TARBALL_TEMPLATE = PAYGEN_AU_SUITE_TEMPLATE + '_control.tar.bz2'

  # Sleep time used in _DiscoverRequiredPayloads. Export so tests can change.
  BUILD_DISCOVER_RETRY_SLEEP = 90

  # Cache of full test payloads for a given version.
  # NOTE(review): this is a class-level mutable dict, shared across all
  # instances of _PaygenBuild.
  _version_to_full_test_payloads = {}
281 class PayloadTest(object):
282 """A payload test definition.
285 payload: A gspaths.Payload object describing the payload to be tested.
286 src_version: The build version the payload needs to be applied to; None
287 for a delta payload, as it already encodes the source version.
290 def __init__(self, payload, src_version=None):
291 self.payload = payload
292 self.src_version = src_version
295 return ('<test for %s%s>' %
297 (' from version %s' % self.src_version)
298 if self.src_version else ''))
303 def __init__(self, build, work_dir, dry_run=False, ignore_finished=False,
304 skip_full_payloads=False, skip_delta_payloads=False,
305 skip_test_payloads=False, skip_nontest_payloads=False,
306 control_dir=None, output_dir=None,
307 run_parallel=False, run_on_builder=False):
310 self._work_dir = work_dir
311 self._drm = dryrun_lib.DryRunMgr(dry_run)
312 self._ignore_finished = dryrun_lib.DryRunMgr(ignore_finished)
313 self._skip_full_payloads = skip_full_payloads
314 self._skip_delta_payloads = skip_delta_payloads
315 self._skip_test_payloads = skip_test_payloads
316 self._skip_nontest_payloads = skip_nontest_payloads
317 self._control_dir = control_dir
318 self._output_dir = output_dir
319 self._previous_version = None
320 self._run_parallel = run_parallel
321 self._run_on_builder = run_on_builder
322 self._archive_board = None
323 self._archive_build = None
324 self._archive_build_uri = None
326 def _GetFlagURI(self, flag):
327 """Find the URI of the lock file associated with this build.
330 flag: Should be a member of gspaths.ChromeosReleases.FLAGS
333 Returns a google storage path to the build flag requested.
335 return gspaths.ChromeosReleases.BuildPayloadsFlagUri(
336 self._build.channel, self._build.board, self._build.version, flag,
337 bucket=self._build.bucket)
  # NOTE(review): this takes `cls` — presumably a @classmethod decorator sits
  # on a line not visible in this chunk; confirm against the full file.
  def _MapToArchive(cls, board, version):
    """Returns the chromeos-image-archive equivalents for the build.

    Args:
      board: The board name (per chromeos-releases).
      version: The build version.

    Returns:
      A tuple consisting of the archive board name, build name and build URI.

    Raises:
      ArchiveError: if we could not compute the mapping.
    """
    # Map chromeos-releases board name to its chromeos-image-archive
    # equivalent (archive names use '_' where release names use '-').
    cfg_iter = itertools.chain(*cbuildbot_config.FindFullConfigsForBoard())
    archive_board_candidates = set([
        archive_board for cfg in cfg_iter for archive_board in cfg['boards']
        if archive_board.replace('_', '-') == board])
    if len(archive_board_candidates) == 0:
      raise ArchiveError('could not find build board name for %s' % board)
    elif len(archive_board_candidates) > 1:
      raise ArchiveError('found multiple build board names for %s: %s' %
                         (board, ', '.join(archive_board_candidates)))

    archive_board = archive_board_candidates.pop()

    # Find something in the respective chromeos-image-archive build directory.
    archive_build_search_uri = gspaths.ChromeosImageArchive.BuildUri(
        archive_board, '*', version)
    archive_build_file_uri_list = urilib.ListFiles(archive_build_search_uri)
    if not archive_build_file_uri_list:
      raise ArchiveError('cannot find archive build directory for %s' %
                         archive_build_search_uri)

    # Use the first search result.
    uri_parts = urlparse.urlsplit(archive_build_file_uri_list[0])
    archive_build_path = os.path.dirname(uri_parts.path)
    archive_build = archive_build_path.strip('/')
    # NOTE(review): the remaining urlunsplit() components are not visible in
    # this chunk of the file — confirm against the full file.
    archive_build_uri = urlparse.urlunsplit((uri_parts.scheme,

    return archive_board, archive_build, archive_build_uri
385 def _ValidateExpectedBuildImages(self, build, images):
386 """Validate that we got the expected images for a build.
388 We expect that for any given build will have at most the following four
396 We also expect that it will have at least one basic build, and never have
397 an NPO build for which it doesn't have a matching basic build.
400 build: The build the images are from.
401 images: The images discovered associated with the build.
404 BuildCorrupt: Raised if unexpected images are found.
405 ImageMissing: Raised if expected images are missing.
408 premp_basic = _FilterForBasic(_FilterForPremp(images))
409 premp_npo = _FilterForNpo(_FilterForPremp(images))
410 mp_basic = _FilterForBasic(_FilterForMp(images))
411 mp_npo = _FilterForNpo(_FilterForMp(images))
413 # Make sure there is no more than one of each of our basic types.
414 for i in (premp_basic, premp_npo, mp_basic, mp_npo):
416 msg = '%s has unexpected filtered images: %s.' % (build, i)
417 raise BuildCorrupt(msg)
419 # Make sure there were no unexpected types of images.
420 if len(images) != len(premp_basic + premp_npo + mp_basic + mp_npo):
421 msg = '%s has unexpected unfiltered images: %s' % (build, images)
422 raise BuildCorrupt(msg)
424 # Make sure there is at least one basic image.
425 if not premp_basic and not mp_basic:
426 msg = '%s has no basic images.' % build
427 raise ImageMissing(msg)
429 # Can't have a premp NPO with the match basic image.
430 if premp_npo and not premp_basic:
431 msg = '%s has a premp NPO, but not a premp basic image.' % build
432 raise ImageMissing(msg)
434 # Can't have an mp NPO with the match basic image.
435 if mp_npo and not mp_basic:
436 msg = '%s has a mp NPO, but not a mp basic image.' % build
437 raise ImageMissing(msg)
439 def _DiscoverImages(self, build):
440 """Return a list of images associated with a given build.
443 build: The build to find images for.
446 A list of images associated with the build. This may include premp, mp,
447 and premp/mp NPO images. We don't currently ever expect more than these
448 four combinations to be present.
451 BuildCorrupt: Raised if unexpected images are found.
452 ImageMissing: Raised if expected images are missing.
454 search_uri = gspaths.ChromeosReleases.ImageUri(
455 build.channel, build.board, build.version, key='*', image_channel='*',
456 image_version='*', bucket=build.bucket)
458 image_uris = urilib.ListFiles(search_uri)
459 images = [gspaths.ChromeosReleases.ParseImageUri(uri) for uri in image_uris]
461 # Unparsable URIs will result in Nones; filter them out.
462 images = [i for i in images if i]
464 self._ValidateExpectedBuildImages(build, images)
468 def _DiscoverTestImageArchives(self, build):
469 """Return a list of unsigned image archives associated with a given build.
472 build: The build to find images for.
475 A list of test image archives associated with the build. Normally, there
476 should be exactly one such item.
479 BuildCorrupt: Raised if unexpected images are found.
480 ImageMissing: Raised if expected images are missing.
482 search_uri = gspaths.ChromeosReleases.UnsignedImageArchiveUri(
483 build.channel, build.board, build.version, milestone='*',
484 image_type='test', bucket=build.bucket)
486 image_uris = urilib.ListFiles(search_uri)
487 images = [gspaths.ChromeosReleases.ParseUnsignedImageArchiveUri(uri)
488 for uri in image_uris]
490 # Unparsable URIs will result in Nones; filter them out.
491 images = [i for i in images if i]
493 # Make sure we found the expected number of build images (1).
495 raise BuildCorrupt('%s has multiple test images: %s' % (build, images))
497 if self._control_dir and len(images) < 1:
498 raise ImageMissing('%s has no test image' % build)
502 def _DiscoverFsiBuilds(self):
503 """Read fsi_images in release.conf.
505 fsi_images is a list of chromeos versions. We assume each one is
506 from the same build/channel as we are and use it to identify a new
507 build. The values in release.conf are only valid for the stable-channel.
510 List of gspaths.Build instances for each build so discovered. The list
513 # FSI versions are only defined for the stable-channel.
514 if self._build.channel != 'stable-channel':
518 fsi_versions = config.GetListValue(self._build.board, 'fsi_images')
519 except ConfigParser.NoOptionError:
520 # fsi_images is an optional field.
524 for version in fsi_versions:
525 results.append(gspaths.Build(version=version,
526 board=self._build.board,
527 channel=self._build.channel,
528 bucket=self._build.bucket))
531 def _DiscoverNmoBuild(self):
532 """Find the currently published version to our channel/board.
534 We assume it was actually built with our current channel/board. This also
535 updates an object member with the previous build, in the case that
536 subsequent logic needs to make use of this knowledge.
539 List of gspaths.Build for previously published builds. Since we can only
540 know about the currently published version, this always contain zero or
543 self._previous_version = query.FindLatestPublished(self._build.channel,
546 if self._previous_version:
547 return [gspaths.Build(gspaths.Build(version=self._previous_version,
548 board=self._build.board,
549 channel=self._build.channel,
550 bucket=self._build.bucket))]
554 def _DiscoverRequiredFullPayloads(self, images):
555 """Find the Payload objects for the images from the current build.
557 In practice, this creates a full payload definition for every image passed
561 images: The images for the current build.
564 A list of gspaths.Payload objects for full payloads for every image.
566 return [gspaths.Payload(tgt_image=i) for i in images]
568 def _DiscoverRequiredNpoDeltas(self, images):
569 """Find the NPO deltas for the images from the current build.
571 Images from the current build, already filtered to be all MP or all PREMP.
574 images: The key-filtered images for the current build.
577 A list of gspaths.Payload objects for the deltas needed for NPO testing.
580 basics = _FilterForBasic(images)
581 npos = _FilterForNpo(images)
583 # If previously filtered for premp, and filtered for npo, there can only
585 assert len(basics) <= 1, 'Unexpected images found %s' % basics
586 assert len(npos) <= 1, 'Unexpected NPO images found %s' % npos
589 return [gspaths.Payload(tgt_image=npos[0], src_image=basics[0])]
593 # TODO(garnold) The reason we need this separately from
594 # _DiscoverRequiredNpoDeltas is that, with test images, we generate
595 # a current -> current delta rather than a real current -> NPO one (there are
596 # no test NPO images generated, unfortunately). Also, the naming of signed
597 # images is different from that of test image archives, so we need different
598 # filtering logic. In all likelihood, we will stop generating NPO deltas with
599 # signed images once this feature stabilizes; at this point, there will no
600 # longer be any use for a signed NPO.
601 def _DiscoverRequiredTestNpoDeltas(self, images):
602 """Find the NPO deltas test-equivalent for images from the current build.
605 images: The pre-filtered test images for the current build.
608 A (possibly empty) list of gspaths.Payload objects representing NPO
609 deltas of test images.
611 # If previously filtered for test images, there must be at most one image.
612 assert len(images) <= 1, 'Unexpected test images found %s' % images
615 return [gspaths.Payload(tgt_image=images[0], src_image=images[0])]
619 def _DiscoverRequiredFromPreviousDeltas(self, images, previous_images):
620 """Find the deltas from previous builds.
622 All arguements should already be filtered to be all MP or all PREMP.
625 images: The key-filtered images for the current build.
626 previous_images: The key-filtered images from previous builds from
627 which delta payloads should be generated.
630 A list of gspaths.Payload objects for the deltas needed from the previous
631 builds, which may be empty.
633 # If we have no images to delta to, no results.
637 # After filtering for NPO, and for MP/PREMP, there can be only one!
638 assert len(images) == 1, 'Unexpected images found %s.' % images
643 # We should never generate downgrades, they are unsafe. Deltas to the
644 # same images are useless. Neither case normally happens unless
645 # we are re-generating payloads for old builds.
646 for prev in previous_images:
647 if gspaths.VersionGreater(image.version, prev.version):
648 # A delta from each previous image to current image.
649 results.append(gspaths.Payload(tgt_image=image, src_image=prev))
651 logging.info('Skipping %s is not older than target', prev)
  def _DiscoverRequiredPayloads(self):
    """Find the payload definitions for the current build.

    This method finds the images for the current build, and for all builds we
    need deltas from, and decides what payloads are needed.

    IMPORTANT: The order in which payloads are listed is significant as it
    reflects on the payload generation order. The current way is to list test
    payloads last, as they are of lesser importance from the release process
    standpoint, and may incur failures that do not affect the signed payloads
    and may be otherwise detrimental to the release schedule.

    Returns:
      A list of tuples of the form (payload, skip), where payload is an
      instance of gspath.Payload and skip is a Boolean that says whether it
      should be skipped (i.e. not generated).

    Raises:
      BuildNotReady: If the current build doesn't seem to have all of its
        images available yet. This commonly happens because the signer hasn't
        finished signing the current build.
      BuildCorrupt: If current or previous builds have unexpected images.
      ImageMissing: Raised if expected images are missing for previous builds.
    """
    # Initiate a list that will contain lists of payload subsets, along with a
    # Boolean stating whether or not we need to skip generating them.
    payload_sublists_skip = []

    # NOTE(review): the `try:` opening the try/except below is elided from
    # this chunk of the file.
    # When discovering the images for our current build, they might not be
    # discoverable right away (GS eventual consistency). So, we retry.
    images = retry_util.RetryException(ImageMissing, 3,
                                       self._DiscoverImages, self._build,
                                       sleep=self.BUILD_DISCOVER_RETRY_SLEEP)
    images += self._DiscoverTestImageArchives(self._build)
    except ImageMissing as e:
      # If the main build doesn't have the final build images, then it's
      # not ready yet.
      raise BuildNotReady()

    _LogList('Images found', images)

    # Discover the previous builds we need deltas from.
    previous_builds = self._DiscoverNmoBuild()
    # NOTE(review): an if/else around the next two log statements is elided
    # from this chunk.
    _LogList('Previous builds considered', previous_builds)
    logging.info('No previous builds found')

    # Discover FSI builds we need deltas from, but omit those that were already
    # discovered as previous builds.
    fsi_builds = [b for b in self._DiscoverFsiBuilds()
                  if b not in previous_builds]
    # NOTE(review): an if/else around the next two log statements is elided
    # from this chunk.
    _LogList('FSI builds considered', fsi_builds)
    logging.info('No FSI builds found')

    # Discover the images from those previous builds, and put them into
    # a single list. Raises ImageMissing if no images are found.
    # NOTE(review): the initialization of `previous_images` is elided from
    # this chunk.
    for b in previous_builds:
      # NOTE(review): a `try:` line is elided here.
      previous_images += self._DiscoverImages(b)
      except ImageMissing as e:
        # Temporarily allow generation of delta payloads to fail because of
        # a missing previous build until crbug.com/243916 is addressed.
        # TODO(mtennant): Remove this when bug is fixed properly.
        logging.warning('Previous build image is missing, skipping: %s', e)

        # We also clear the previous version field so that subsequent code does
        # not attempt to generate a full update test from the N-1 version;
        # since this version has missing images, no payloads were generated for
        # it and test generation is bound to fail.
        # TODO(garnold) This should be reversed together with the rest of this
        # hack.
        self._previous_version = None

        # In this case, we should also skip test image discovery; since no
        # signed deltas will be generated from this build, we don't need to
        # generate test deltas from it.
        # NOTE(review): a `continue` statement appears to be elided here.
      previous_images += self._DiscoverTestImageArchives(b)
    # NOTE(review): a `for b in fsi_builds:` loop header appears to be elided
    # before the next two statements.
      previous_images += self._DiscoverImages(b)
      previous_images += self._DiscoverTestImageArchives(b)

    # Only consider base (signed) and test previous images.
    filtered_previous_images = _FilterForBasic(previous_images)
    filtered_previous_images += _FilterForTest(previous_images)
    previous_images = filtered_previous_images

    # Generate full payloads for all non-test images in the current build.
    # Include base, NPO, premp, and mp (if present).
    payload_sublists_skip.append(
        (self._skip_full_payloads or self._skip_nontest_payloads,
         self._DiscoverRequiredFullPayloads(_FilterForImages(images))))

    # Deltas for current -> NPO (pre-MP and MP).
    payload_sublists_skip.append(
        (self._skip_delta_payloads or self._skip_nontest_payloads,
         self._DiscoverRequiredNpoDeltas(_FilterForPremp(images))))
    payload_sublists_skip.append(
        (self._skip_delta_payloads or self._skip_nontest_payloads,
         self._DiscoverRequiredNpoDeltas(_FilterForMp(images))))

    # Deltas for previous -> current (pre-MP and MP).
    payload_sublists_skip.append(
        (self._skip_delta_payloads or self._skip_nontest_payloads,
         self._DiscoverRequiredFromPreviousDeltas(
             _FilterForPremp(_FilterForBasic(images)),
             _FilterForPremp(previous_images))))
    payload_sublists_skip.append(
        (self._skip_delta_payloads or self._skip_nontest_payloads,
         self._DiscoverRequiredFromPreviousDeltas(
             _FilterForMp(_FilterForBasic(images)),
             _FilterForMp(previous_images))))

    # Full test payloads.
    payload_sublists_skip.append(
        (self._skip_full_payloads or self._skip_test_payloads,
         self._DiscoverRequiredFullPayloads(_FilterForTest(images))))

    # Delta for current -> NPO (test payloads).
    payload_sublists_skip.append(
        (self._skip_delta_payloads or self._skip_test_payloads,
         self._DiscoverRequiredTestNpoDeltas(_FilterForTest(images))))

    # Deltas for previous -> current (test payloads).
    payload_sublists_skip.append(
        (self._skip_delta_payloads or self._skip_test_payloads,
         self._DiscoverRequiredFromPreviousDeltas(
             _FilterForTest(images), _FilterForTest(previous_images))))

    # Organize everything into a single list of (payload, skip) pairs; also, be
    # sure to fill in a URL for each payload.
    # NOTE(review): the initialization of `payloads_skip` and the final
    # `return payloads_skip` are elided from this chunk.
    for (do_skip, payloads) in payload_sublists_skip:
      for payload in payloads:
        paygen_payload_lib.FillInPayloadUri(payload)
        payloads_skip.append((payload, do_skip))
  def _GeneratePayloads(self, payloads, lock=None):
    """Generate the payloads called for by a list of payload definitions.

    It will keep going, even if there is a failure.

    Args:
      payloads: gspath.Payload objects defining all of the payloads to generate.
      lock: gslock protecting this paygen_build run.

    Raises:
      Any arbitrary exception raised by CreateAndUploadPayload.
    """
    # NOTE(review): additional tuple elements are elided from this chunk;
    # per _GenerateSinglePayload's signature they should be the work dir and
    # the dry-run flag — confirm against the full file.
    payloads_args = [(payload,
                      isinstance(payload.tgt_image, gspaths.Image),
                      for payload in payloads]

    if self._run_parallel:
      parallel.RunTasksInProcessPool(_GenerateSinglePayload, payloads_args)
    # NOTE(review): an `else:` line appears to be elided here.
    for args in payloads_args:
      _GenerateSinglePayload(*args)

    # This can raise LockNotAcquired, if the lock timed out during a
    # single payload generation.
    # NOTE(review): the lock-renewal code this comment refers to is elided
    # from this chunk.
831 def _FindFullTestPayloads(self, version):
832 """Returns a list of full test payloads for a given version.
834 Uses the current build's channel, board and bucket values. This method
835 caches the full test payloads previously discovered as we may be using them
836 for multiple tests in a single run.
839 version: A build version whose payloads to look for.
842 A (possibly empty) list of payload URIs.
844 if version in self._version_to_full_test_payloads:
845 return self._version_to_full_test_payloads[version]
847 payload_search_uri = gspaths.ChromeosReleases.PayloadUri(
848 self._build.channel, self._build.board, version, '*',
849 bucket=self._build.bucket)
850 full_test_payloads = [u for u in urilib.ListFiles(payload_search_uri)
851 if not u.endswith('.log')]
852 self._version_to_full_test_payloads[version] = full_test_payloads
853 return full_test_payloads
  def _EmitControlFile(self, payload_test, suite_name, control_dump_dir):
    """Emit an Autotest control file for a given payload test."""
    # Figure out the source version for the test.
    payload = payload_test.payload
    src_version = payload_test.src_version
    # NOTE(review): an enclosing conditional (likely `if not src_version:`)
    # appears to be elided here — otherwise src_version would be clobbered
    # below. Confirm against the full file.
    if not payload.src_image:
      raise PayloadTestError(
          'no source version provided for testing full payload %s' %
          # NOTE(review): the format-string argument is elided in this chunk.
    src_version = payload.src_image.version

    # Discover the full test payload that corresponds to the source version.
    src_payload_uri_list = self._FindFullTestPayloads(src_version)
    if not src_payload_uri_list:
      logging.error('Cannot find full test payload for source version (%s), '
                    'control file not generated', src_version)
      raise PayloadTestError('cannot find source payload for testing %s' %
                             # NOTE(review): format argument elided.

    if len(src_payload_uri_list) != 1:
      logging.error('Found multiple (%d) full test payloads for source version '
                    '(%s), control file not generated:\n%s',
                    len(src_payload_uri_list), src_version,
                    '\n'.join(src_payload_uri_list))
      raise PayloadTestError('multiple source payloads found for testing %s' %
                             # NOTE(review): format argument elided.

    src_payload_uri = src_payload_uri_list[0]
    logging.info('Source full test payload found at %s', src_payload_uri)

    # Find the chromeos_image_archive location of the source build.
    # NOTE(review): a `try:` line is elided here.
    _, _, source_archive_uri = self._MapToArchive(
        payload.tgt_image.board, src_version)
    except ArchiveError as e:
      raise PayloadTestError(
          'error mapping source build to images archive: %s' % e)

    # NOTE(review): some positional arguments to TestConfig are elided from
    # this chunk — confirm against the full file.
    test = test_params.TestConfig(
        suite_name,               # Name of the test (use the suite name).
        False,                    # Using test images.
        bool(payload.src_image),  # Whether this is a delta.
        payload.tgt_image.version,
        suite_name=suite_name,
        source_archive_uri=source_archive_uri)

    # Render the control file from the template and dump it for packaging.
    with open(test_control.get_control_file_name()) as f:
      control_code = f.read()
    control_file = test_control.dump_autotest_control_file(
        test, None, control_code, control_dump_dir)
    logging.info('Control file emitted at %s', control_file)
  def _ScheduleAutotestTests(self, suite_name):
    """Run the appropriate command to schedule the Autotests we have prepped.

    Args:
      suite_name: The name of the test suite.
    """
    timeout_mins = cbuildbot_config.HWTestConfig.DEFAULT_HW_TEST_TIMEOUT / 60
    if self._run_on_builder:
      # NOTE(review): a `try:` line is elided from this chunk here.
      commands.RunHWTestSuite(board=self._archive_board,
                              build=self._archive_build,
                              # NOTE(review): several keyword arguments are
                              # elided from this chunk.
                              wait_for_results=True,
                              timeout_mins=timeout_mins,
                              debug=bool(self._drm))
      except commands.TestWarning as e:
        logging.warning('Warning running test suite; error output:\n%s', e)
    # NOTE(review): the `else:` branch header and the start of the run_suite
    # command list (`cmd = [...`) are elided from this chunk.
          os.path.join(fixup_path.CROS_AUTOTEST_PATH, 'site_utils',
          '--board', self._archive_board,
          '--build', self._archive_build,
          '--suite_name', suite_name,
          '--file_bugs', 'True',
          '--timeout_mins', str(timeout_mins),
          '--no_wait', 'False',
      logging.info('Running autotest suite: %s', ' '.join(cmd))
      cmd_result = utils.RunCommand(cmd, error_ok=True, redirect_stdout=True,
                                    redirect_stderr=True, return_result=True)
      if cmd_result.returncode:
        logging.error('Error (%d) running test suite; error output:\n%s',
                      cmd_result.returncode, cmd_result.error)
        raise PayloadTestError('failed to run test (return code %d)' %
                               cmd_result.returncode)
  def _AutotestPayloads(self, payload_tests):
    """Create necessary test artifacts and initiate Autotest runs.

    Args:
      payload_tests: An iterable of PayloadTest objects defining payload tests.
    """
    # Create inner hierarchy for dumping Autotest control files.
    control_dump_dir = os.path.join(self._control_dir,
                                    self.CONTROL_FILE_SUBDIR)
    os.makedirs(control_dump_dir)

    # Customize the test suite's name based on this build's channel.
    test_channel = self._build.channel.rpartition('-')[0]
    suite_name = (self.PAYGEN_AU_SUITE_TEMPLATE % test_channel)

    # Emit a control file for each payload.
    logging.info('Emitting control files into %s', control_dump_dir)
    for payload_test in payload_tests:
      self._EmitControlFile(payload_test, suite_name, control_dump_dir)

    tarball_name = self.CONTROL_TARBALL_TEMPLATE % test_channel

    # Must use an absolute tarball path since tar is run in a different cwd.
    tarball_path = os.path.join(self._control_dir, tarball_name)

    # Create the tarball.
    logging.info('Packing %s in %s into %s', self.CONTROL_FILE_SUBDIR,
                 self._control_dir, tarball_path)
    cmd_result = cros_build_lib.CreateTarball(
        tarball_path, self._control_dir,
        compression=cros_build_lib.COMP_BZIP2,
        inputs=[self.CONTROL_FILE_SUBDIR])
    if cmd_result.returncode != 0:
      logging.error('Error (%d) when tarring control files',
                    cmd_result.returncode)
      raise PayloadTestError(
          'failed to create autotest tarball (return code %d)' %
          cmd_result.returncode)

    # Upload the tarball, be sure to make it world-readable.
    upload_target = os.path.join(self._archive_build_uri, tarball_name)
    logging.info('Uploading autotest control tarball to %s', upload_target)
    gslib.Copy(tarball_path, upload_target, acl='public-read')

    # Do not run the suite for older builds whose suite staging logic is
    # broken. We use the build's milestone number as a rough estimate to
    # whether or not it's recent enough. We derive the milestone number from
    # the archive build name, which takes the form
    # boardname-release/R12-3456.78.9 (in this case it is 12).
    # NOTE(review): a `try:` line, an early `return` inside the skip branch,
    # and the `except ValueError:` line preceding the raise below are elided
    # from this chunk — confirm against the full file.
    build_mstone = int(self._archive_build.partition('/')[2]
                       .partition('-')[0][1:])
    if build_mstone < RUN_SUITE_MIN_MSTONE:
      logging.warning('Build milestone < %s, test suite scheduling skipped',
                      RUN_SUITE_MIN_MSTONE)
    raise PayloadTestError(
        'Failed to infer archive build milestone number (%s)' %
        self._archive_build)

    # Actually have the tests run.
    self._ScheduleAutotestTests(suite_name)
1020 def _IsTestDeltaPayload(payload):
1021 """Returns True iff a given payload is a test delta one."""
1022 return (payload.tgt_image.get('image_type', 'signed') != 'signed' and
1023 payload.src_image is not None)
1025 def _CreatePayloadTests(self, payloads):
1026 """Returns a list of test configurations for a given list of payloads.
1029 payloads: A list of (already generated) build payloads.
1032 A list of PayloadTest objects defining payload test cases.
1035 for payload in payloads:
1036 # We are only testing test payloads.
1037 if payload.tgt_image.get('image_type', 'signed') == 'signed':
1040 # Distinguish between delta (source version encoded) and full payloads.
1041 if payload.src_image is None:
1042 # Create a full update test from NMO.
1043 if self._previous_version:
1044 payload_tests.append(self.PayloadTest(
1045 payload, src_version=self._previous_version))
1047 logging.warn('No previous build, not testing full update %s from '
1050 # Create a full update test from the current version to itself.
1051 payload_tests.append(self.PayloadTest(
1052 payload, src_version=self._build.version))
1054 # Create a delta update test.
1055 payload_tests.append(self.PayloadTest(payload))
1057 return payload_tests
  def _CleanupBuild(self):
    """Clean up any leaked temp files associated with this build in GS.

    Removes the signer-client scratch area under the build's payload
    signing URI; a no-match result is not an error since there may be
    nothing to clean up.
    """
    # Clean up any signer client files that leaked on this or previous
    # runs. NOTE(review): self._drm appears to be a dry-run-aware invoker
    # (see dryrun_lib import) — under dry-run the Remove is only logged.
    self._drm(gslib.Remove,
              gspaths.ChromeosReleases.BuildPayloadsSigningUri(
                  self._build.channel, self._build.board, self._build.version,
                  bucket=self._build.bucket),
              recurse=True, ignore_no_match=True)
1069 def CreatePayloads(self):
1070 """Get lock on this build, and Process if we succeed.
1072 While holding the lock, check assorted build flags to see if we should
1076 BuildSkip: If the build was marked with a skip flag.
1077 BuildFinished: If the build was already marked as finished.
1078 BuildLocked: If the build is locked by another server or process.
1080 lock_uri = self._GetFlagURI(gspaths.ChromeosReleases.LOCK)
1081 skip_uri = self._GetFlagURI(gspaths.ChromeosReleases.SKIP)
1082 finished_uri = self._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
1084 logging.info('Examining: %s', self._build)
1087 with gslock.Lock(lock_uri, dry_run=bool(self._drm)) as build_lock:
1088 # If the build was marked to skip, skip
1089 if gslib.Exists(skip_uri):
1092 # If the build was already marked as finished, we're finished.
1093 if self._ignore_finished(gslib.Exists, finished_uri):
1094 raise BuildFinished()
1096 logging.info('Starting: %s', self._build)
1098 payloads_skip = self._DiscoverRequiredPayloads()
1100 # Assume we can finish the build until we find a reason we can't.
1103 if self._output_dir:
1106 # Find out which payloads already exist, updating the payload object's
1107 # URI accordingly. In doing so we're creating a list of all payload
1108 # objects and their skip/exist attributes. We're also recording whether
1109 # this run will be skipping any actual work.
1111 for payload, skip in payloads_skip:
1112 if self._output_dir:
1113 # output_dir means we are forcing all payloads to be generated
1114 # with a new destination.
1115 result = [os.path.join(self._output_dir,
1116 os.path.basename(payload.uri))]
1119 result = paygen_payload_lib.FindExistingPayloads(payload)
1120 exists = bool(result)
1123 paygen_payload_lib.SetPayloadUri(payload, result[0])
1127 payloads_attrs.append((payload, skip, exists))
1129 # Display payload generation list, including payload name and whether
1130 # or not it already exists or will be skipped.
1132 for payload, skip, exists in payloads_attrs:
1137 desc += ' (skipped)'
1138 log_items.append(desc)
1140 _LogList('All payloads for the build', log_items)
1142 # Generate new payloads.
1143 new_payloads = [payload for payload, skip, exists in payloads_attrs
1144 if not (skip or exists)]
1146 logging.info('Generating %d new payload(s)', len(new_payloads))
1147 self._GeneratePayloads(new_payloads, build_lock)
1149 logging.info('No new payloads to generate')
1152 if not self._control_dir:
1153 logging.info('Payload autotesting skipped')
1155 elif not can_finish:
1156 logging.warning('Not all payloads were generated/uploaded, '
1157 'skipping payload autotesting.')
1160 # Check that the build has a corresponding archive directory. If it
1161 # does not, then testing should not be attempted.
1162 archive_board, archive_build, archive_build_uri = (
1163 self._MapToArchive(self._build.board, self._build.version))
1164 self._archive_board = archive_board
1165 self._archive_build = archive_build
1166 self._archive_build_uri = archive_build_uri
1168 # We have a control file directory and all payloads have been
1169 # generated. Lets create the list of tests to conduct.
1170 payload_tests = self._CreatePayloadTests(
1171 [payload for payload, _, _ in payloads_attrs])
1173 logging.info('Initiating %d payload tests', len(payload_tests))
1174 self._drm(self._AutotestPayloads, payload_tests)
1175 except ArchiveError as e:
1176 logging.warning('Cannot map build to images archive, skipping '
1177 'payload autotesting.')
1180 self._CleanupBuild()
1182 self._drm(gslib.CreateWithContents, finished_uri,
1183 socket.gethostname())
1185 logging.warning('Not all payloads were generated, uploaded or '
1186 'tested; not marking build as finished')
1188 logging.info('Finished: %s', self._build)
1190 except gslock.LockNotAcquired as e:
1191 logging.info('Build already being processed: %s', e)
1195 logging.info('Nothing done: %s', self._build)
1199 logging.error('Failed: %s', self._build)
1203 def _FindControlFileDir(work_dir):
1204 """Decide the directory for emitting control files.
1206 If a working directory is passed in, we create a unique directory inside
1207 it; other use /tmp (Python's default).
1210 work_dir: Create the control file directory here (None for /tmp).
1213 Path to a unique directory that the caller is responsible for cleaning up.
1215 # Setup assorted working directories.
1216 # It is safe for multiple parallel instances of paygen_payload to share the
1217 # same working directory.
1218 if work_dir and not os.path.exists(work_dir):
1219 os.makedirs(work_dir)
1221 # If work_dir is None, then mkdtemp will use '/tmp'
1222 return tempfile.mkdtemp(prefix='paygen_build-control_files.', dir=work_dir)
def ValidateBoardConfig(board):
  """Validate that we have config values for the specified |board|.

  Args:
    board: Name of board to check.

  Raises:
    BoardNotConfigured if the board is unknown.
  """
  # Right now, existence in the complete board set is the only check.
  known_boards = config.GetCompleteBoardSet()
  if board not in known_boards:
    raise BoardNotConfigured(board)
def CreatePayloads(build, work_dir, dry_run=False, ignore_finished=False,
                   skip_full_payloads=False, skip_delta_payloads=False,
                   skip_test_payloads=False, skip_nontest_payloads=False,
                   skip_autotest=False, output_dir=None, run_parallel=False,
                   run_on_builder=False):
  """Helper method that generates payloads for a given build.

  Args:
    build: gspaths.Build instance describing the build to generate payloads
      for.
    work_dir: Directory to contain both scratch and long-term work files.
    dry_run: Do not generate payloads (optional).
    ignore_finished: Ignore the FINISHED flag (optional).
    skip_full_payloads: Do not generate full payloads.
    skip_delta_payloads: Do not generate delta payloads.
    skip_test_payloads: Do not generate test payloads.
    skip_nontest_payloads: Do not generate non-test payloads.
    skip_autotest: Do not generate test artifacts or run tests.
    output_dir: Directory for payload files, or None for GS default locations.
    run_parallel: Generate payloads in parallel processes.
    run_on_builder: Running in a cbuildbot environment on a builder.

  Raises:
    BoardNotConfigured: If the build's board is unknown.
  """
  ValidateBoardConfig(build.board)

  # Initialize so the finally clause is safe even if control-file setup is
  # skipped or raises before the directory is created.
  control_dir = None
  try:
    if not skip_autotest:
      control_dir = _FindControlFileDir(work_dir)

    _PaygenBuild(build, work_dir, dry_run=dry_run,
                 ignore_finished=ignore_finished,
                 skip_full_payloads=skip_full_payloads,
                 skip_delta_payloads=skip_delta_payloads,
                 skip_test_payloads=skip_test_payloads,
                 skip_nontest_payloads=skip_nontest_payloads,
                 control_dir=control_dir, output_dir=output_dir,
                 run_parallel=run_parallel,
                 run_on_builder=run_on_builder).CreatePayloads()
  finally:
    # Always clean up the control-file directory we created, even on error.
    if control_dir:
      shutil.rmtree(control_dir)