1 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Hold the functions that do the real work generating payloads."""
7 from __future__ import print_function
18 fixup_path.FixupPath()
20 from chromite.lib import cros_build_lib
21 from chromite.lib import osutils
22 from chromite.lib.paygen import dryrun_lib
23 from chromite.lib.paygen import filelib
24 from chromite.lib.paygen import gspaths
25 from chromite.lib.paygen import signer_payloads_client
26 from chromite.lib.paygen import urilib
27 from chromite.lib.paygen import utils
29 # If we are a bootstrap environment, this import will fail.
30 # We quietly ignore the failure, but leave bombs around that will
31 # explode if people try to really use this library.
33 from dev.host.lib import update_payload
36 logging.exception('update_payload import failed. Normal during bootstrap.')
class Error(Exception):
  """Root of the exception hierarchy for payload generation failures."""
class UnexpectedSignerResultsError(Error):
  """Raised when the signer returns results that don't match our expectations."""
class PayloadVerificationError(Error):
  """Raised when a freshly generated payload fails to verify."""
class _PaygenPayload(object):
  """Class to manage the process of generating and signing a payload."""

  # GeneratorUri uses these to ensure we don't use generators that are too
  # old to be supported.
  MINIMUM_GENERATOR_VERSION = '6270.0.0'
  MINIMUM_GENERATOR_URI = (
      'gs://chromeos-releases/canary-channel/x86-mario/%s/au-generator.zip' %
      MINIMUM_GENERATOR_VERSION)

  # What keys do we sign payloads with, and what size are they?
  PAYLOAD_SIGNATURE_KEYSETS = ('update_signer',)
  # NOTE(review): `2048 / 8` is integer division under Python 2 (this file's
  # era) but would yield a float under Python 3 — confirm before porting.
  PAYLOAD_SIGNATURE_SIZES_BYTES = (2048 / 8,)  # aka 2048 bits in bytes.

  # File names of the images inside downloaded archives (see _PrepareImage).
  TEST_IMAGE_NAME = 'chromiumos_test_image.bin'
  RECOVERY_IMAGE_NAME = 'chromiumos_recovery_image.bin'

  # Default names used by cros_generate_update_payload for extracting old/new
  # kernel/rootfs partitions.
  _DEFAULT_OLD_KERN_PART = 'old_kern.dat'
  _DEFAULT_OLD_ROOT_PART = 'old_root.dat'
  _DEFAULT_NEW_KERN_PART = 'new_kern.dat'
  _DEFAULT_NEW_ROOT_PART = 'new_root.dat'

  # TODO(garnold)(chromium:243559) stop using these constants once we start
  # embedding partition sizes in payloads.
  _DEFAULT_ROOTFS_PART_SIZE = 2 * 1024 * 1024 * 1024  # 2 GiB
  _DEFAULT_KERNEL_PART_SIZE = 16 * 1024 * 1024  # 16 MiB
  def __init__(self, payload, cache, work_dir, sign, verify,
               au_generator_uri_override, dry_run=False):
    """Init for _PaygenPayload.

    Args:
      payload: An instance of gspaths.Payload describing the payload to
        generate.
      cache: An instance of DownloadCache for retrieving files.
      work_dir: A working directory for output files. Can NOT be shared.
      sign: Boolean saying if the payload should be signed (normally, you do).
      verify: whether the payload should be verified after being generated
      au_generator_uri_override: URI to override standard au_generator.zip
        selection rules.
      dry_run: do not do any actual work
    """
    self.payload = payload
    self.work_dir = work_dir

    self._au_generator_uri_override = au_generator_uri_override
    # Dry-run manager: used to wrap the top-level steps so they can be
    # skipped when dry_run is set (see its use in this class's run method).
    self._drm = dryrun_lib.DryRunMgr(dry_run)

    # All scratch/output files live inside work_dir.
    self.generator_dir = os.path.join(work_dir, 'au-generator')
    self.src_image_file = os.path.join(work_dir, 'src_image.bin')
    self.tgt_image_file = os.path.join(work_dir, 'tgt_image.bin')

    self.payload_file = os.path.join(work_dir, 'delta.bin')
    self.delta_log_file = os.path.join(work_dir, 'delta.log')

    # NOTE(review): the assignments for cache/sign/verify (self.cache,
    # self._verify, ...) are elided from this view; self._verify and
    # self.cache are read elsewhere in the class — confirm against the full
    # file.
    if self._verify and update_payload is None:
      # TODO(dgarrett): Change to a hard failure after crbug.com/415027 fixed.
      logging.error('Verification disabled because update_payload unavailable.')

    self.signed_payload_file = self.payload_file + '.signed'
    self.metadata_signature_file = self._MetadataUri(self.signed_payload_file)

    # Client used to ask the signer (via Google Storage) to sign our hashes.
    self.signer = signer_payloads_client.SignerPayloadsClientGoogleStorage(
        payload.tgt_image.channel,
        payload.tgt_image.board,
        payload.tgt_image.version)
125 def _MetadataUri(self, uri):
126 """Given a payload uri, find the uri for the metadata signature."""
127 return uri + '.metadata-signature'
  def _DeltaLogsUri(self, uri):
    """Given a payload uri, find the uri for the delta generator logs."""
    # NOTE(review): the return statement is elided from this view —
    # presumably appends a log suffix to uri (it is consumed by the upload
    # step together with self.delta_log_file); confirm against the full file.
  def _GeneratorUri(self):
    """Find the URI for the au-generator.zip to use to generate this payload.

    The intent is to always find a generator compatible with the version
    that will process the update generated. Notice that Full updates must
    be compatible with all versions, no matter how old.

    Returns:
      URI of an au-generator.zip in string form.
    """
    # An explicit override always wins.
    if self._au_generator_uri_override:
      return self._au_generator_uri_override

    if (self.payload.src_image and
        gspaths.VersionGreater(self.payload.src_image.version,
                               self.MINIMUM_GENERATOR_VERSION)):
      # If we are a delta, and newer than the minimum delta age,
      # Use the generator from the src.
      return gspaths.ChromeosReleases.GeneratorUri(
          self.payload.src_image.channel,
          self.payload.src_image.board,
          self.payload.src_image.version)

    # If we are a full update, or a delta from older than minimum, use
    # the minimum generator version.
    return self.MINIMUM_GENERATOR_URI
  def _PrepareGenerator(self):
    """Download, and extract au-generate.zip into self.generator_dir."""
    generator_uri = self._GeneratorUri()

    logging.info('Preparing au-generate.zip from %s.', generator_uri)

    # Extract zipped delta generator files to the expected directory.
    tmp_zip = self.cache.GetFileInTempFile(generator_uri)
    utils.RunCommand(['unzip', '-o', '-d', self.generator_dir, tmp_zip.name],
                     redirect_stdout=True, redirect_stderr=True)
    # NOTE(review): trailing line(s) elided from this view (presumably
    # closing/releasing tmp_zip) — confirm against the full file.
  def _RunGeneratorCmd(self, cmd):
    """Wrapper for RunCommand for programs in self.generator_dir.

    Adjusts the program name for the current self.au_generator directory, and
    sets up the special requirements needed for these 'out of chroot'
    programs. Will automatically log the command output if execution resulted
    in a nonzero exit code. Note that the command's stdout and stderr are
    combined into a single string. This also sets the TMPDIR variable
    accordingly in the spawned process' environment.

    Args:
      cmd: Program and argument list in a list. ['delta_generator', '--help']

    Returns:
      The output of the executed command.

    Raises:
      cros_build_lib.RunCommandError if the command exited with a nonzero code.
    """
    # NOTE(review): several lines are elided from this view (the dict opener
    # for the env mapping, the cmd/env arguments and closer of the RunCommand
    # call, the error_code_ok flag, the final raise arguments, and the
    # success-path return) — confirm against the full file.
    # Adjust the command name to match the directory it's in.
    cmd[0] = os.path.join(self.generator_dir, cmd[0])

    # Modify the PATH and TMPDIR when running the script.
        'PATH': utils.PathPrepend(self.generator_dir),
        'TMPDIR': self.work_dir}

    result = cros_build_lib.RunCommand(
        cwd=self.generator_dir,
        redirect_stdout=True,
        combine_stdout_stderr=True,

    # Dump error output and raise an exception if things went awry.
    if result.returncode:
      logging.error('Nonzero exit code (%d), dumping command output:\n%s',
                    result.returncode, result.output)
      raise cros_build_lib.RunCommandError(
          'Command failed: %s (cwd=%s)' % (' '.join(cmd), self.generator_dir),
  def _BuildArg(flag, dict_obj, key, default=None):
    """Returns a command-line argument iff its value is present in a dictionary.

    Note: there is no self/cls parameter — presumably decorated as a
    @staticmethod on a line elided from this view; confirm against the full
    file.

    Args:
      flag: the flag name to use with the argument value, e.g. --foo; if None
        or an empty string, no flag will be used
      dict_obj: a dictionary mapping possible keys to values
      key: the key of interest; e.g. 'foo'
      default: a default value to use if key is not in dict_obj (optional)

    Returns:
      If dict_obj[key] contains a non-False value or default is non-False,
      returns a list containing the flag and value arguments (e.g. ['--foo',
      'bar']), unless flag is empty/None, in which case returns a list
      containing only the value argument (e.g. ['bar']). Otherwise, returns an
      empty list.
    """
    # `or default` means a falsy dict value (0, '', None) also falls back to
    # the default, not just a missing key.
    val = dict_obj.get(key) or default
    # NOTE(review): the guards around the two lines below (per the Returns
    # doc, presumably `if val:` and `if flag:`) and the return statements are
    # elided from this view — confirm against the full file.
      arg_list = [str(val)]
        arg_list.insert(0, flag)
  def _PrepareImage(self, image, image_file):
    """Download and prepare an image for delta generation.

    Preparation includes downloading, extracting and converting the image into
    an on-disk format, as necessary.

    Args:
      image: an object representing the image we're processing
      image_file: file into which the prepared image should be copied.
    """
    # NOTE(review): several structural lines (the dict closer and the
    # if/else guards around the archive handling) are elided from this view —
    # indentation below is best-effort; confirm against the full file.
    logging.info('Preparing image from %s as %s', image.uri, image_file)

    # Figure out what we're downloading and how to handle it.
    # Maps image_type -> (file name to extract from an archive or None,
    # <second flag's purpose not visible in this view — it is discarded>).
    image_handling_by_type = {
        'signed': (None, True),
        'test': (self.TEST_IMAGE_NAME, False),
        'recovery': (self.RECOVERY_IMAGE_NAME, True),
    extract_file, _ = image_handling_by_type[image.get('image_type', 'signed')]

    # Are we downloading an archive that contains the image?
      # Archive will be downloaded to a temporary location.
      with tempfile.NamedTemporaryFile(
          prefix='image-archive-', suffix='.tar.xz', dir=self.work_dir,
          delete=False) as temp_file:
        download_file = temp_file.name
      download_file = image_file

    # Download the image file or archive.
    self.cache.GetFileCopy(image.uri, download_file)

    # If we downloaded an archive, extract the image file from it.
      cmd = ['tar', '-xJf', download_file, extract_file]
      cros_build_lib.RunCommand(cmd, cwd=self.work_dir)

      # Rename it into the desired image name.
      shutil.move(os.path.join(self.work_dir, extract_file), image_file)

      # It's safe to delete the archive at this point.
      os.remove(download_file)
  def _GenerateUnsignedPayload(self):
    """Generate the unsigned delta into self.payload_file."""
    # Note that the command run here requires sudo access.
    # NOTE(review): a few lines of the cmd list (including its closers) are
    # elided from this view — confirm against the full file.

    logging.info('Generating unsigned payload as %s', self.payload_file)

    tgt_image = self.payload.tgt_image
    cmd = ['cros_generate_update_payload',
           '--output', self.payload_file,
           '--image', self.tgt_image_file,
           '--channel', tgt_image.channel,
           '--board', tgt_image.board,
           '--version', tgt_image.version,
    # Optional flags: only emitted when the value (or its default) is truthy.
    cmd += self._BuildArg('--key', tgt_image, 'key', default='test')
    cmd += self._BuildArg('--build_channel', tgt_image, 'image_channel',
                          default=tgt_image.channel)
    cmd += self._BuildArg('--build_version', tgt_image, 'image_version',
                          default=tgt_image.version)

    # For deltas, also describe the source image.
    if self.payload.src_image:
      src_image = self.payload.src_image
      cmd += ['--src_image', self.src_image_file,
              '--src_channel', src_image.channel,
              '--src_board', src_image.board,
              '--src_version', src_image.version,
      cmd += self._BuildArg('--src_key', src_image, 'key', default='test')
      cmd += self._BuildArg('--src_build_channel', src_image, 'image_channel',
                            default=src_image.channel)
      cmd += self._BuildArg('--src_build_version', src_image, 'image_version',
                            default=src_image.version)

    # Keep the generator's log output for later upload.
    delta_log = self._RunGeneratorCmd(cmd)
    self._StoreDeltaLog(delta_log)
  def _GenPayloadHash(self):
    """Generate a hash of payload and metadata.

    Works from an unsigned update payload.

    Returns:
      payload_hash as a string.
    """
    logging.info('Calculating payload hashes on %s.', self.payload_file)

    # How big will the signatures be.
    signature_sizes = [str(size) for size in self.PAYLOAD_SIGNATURE_SIZES_BYTES]

    # delta_generator writes the hash into the temp file; we read it back.
    with tempfile.NamedTemporaryFile('rb') as payload_hash_file:
      cmd = ['delta_generator',
             '-in_file', self.payload_file,
             '-out_hash_file', payload_hash_file.name,
             '-signature_size', ':'.join(signature_sizes)]

      self._RunGeneratorCmd(cmd)
      return payload_hash_file.read()
  def _GenMetadataHash(self):
    """Generate a hash of the payload metadata.

    Works from an unsigned update payload.

    Returns:
      metadata_hash as a string.
    """
    logging.info('Calculating payload hashes on %s.', self.payload_file)

    # How big will the signatures be.
    signature_sizes = [str(size) for size in self.PAYLOAD_SIGNATURE_SIZES_BYTES]

    # delta_generator writes the metadata hash into the temp file; read back.
    with tempfile.NamedTemporaryFile('rb') as metadata_hash_file:
      cmd = ['delta_generator',
             '-in_file', self.payload_file,
             '-out_metadata_hash_file', metadata_hash_file.name,
             '-signature_size', ':'.join(signature_sizes)]

      self._RunGeneratorCmd(cmd)
      return metadata_hash_file.read()
  def _GenerateSignerResultsError(self, format_str, *args):
    """Helper for reporting errors with signer results.

    Raises:
      UnexpectedSignerResultsError: always, with the formatted message.
    """
    msg = format_str % args
    # NOTE(review): one line elided here (presumably logging msg) — confirm
    # against the full file.
    raise UnexpectedSignerResultsError(msg)
  def _SignHashes(self, hashes):
    """Get the signer to sign the hashes with the update payload key via GS.

    May sign each hash with more than one key, based on how many keysets are
    configured in PAYLOAD_SIGNATURE_KEYSETS.

    Args:
      hashes: List of hashes to be signed.

    Returns:
      List of lists which contain each signed hash.
      [[hash_1_sig_1, hash_1_sig_2], [hash_2_sig_1, hash_2_sig_2]]
    """
    # NOTE(review): several argument lines of the calls below (and the final
    # return of hashes_sigs) are elided from this view — confirm against the
    # full file.
    logging.info('Signing payload hashes with %s.',
                 ', '.join(self.PAYLOAD_SIGNATURE_KEYSETS))

    # [[hash_1_sig_1, hash_1_sig_2], [hash_2_sig_1, hash_2_sig_2]]
    hashes_sigs = self.signer.GetHashSignatures(
        keysets=self.PAYLOAD_SIGNATURE_KEYSETS)

    if hashes_sigs is None:
      self._GenerateSignerResultsError('Signing of hashes failed')
    if len(hashes_sigs) != len(hashes):
      self._GenerateSignerResultsError(
          'Count of hashes signed (%d) != Count of hashes (%d).',

    # Make sure that the results we got back have the expected number of
    # signatures.
    for hash_sigs in hashes_sigs:
      # Make sure each hash has the right number of signatures.
      if len(hash_sigs) != len(self.PAYLOAD_SIGNATURE_SIZES_BYTES):
        self._GenerateSignerResultsError(
            'Signature count (%d) != Expected signature count (%d)',
            len(self.PAYLOAD_SIGNATURE_SIZES_BYTES))

      # Make sure each hash signature is the expected size.
      for sig, sig_size in zip(hash_sigs, self.PAYLOAD_SIGNATURE_SIZES_BYTES):
        if len(sig) != sig_size:
          self._GenerateSignerResultsError(
              'Signature size (%d) != expected size(%d)',
  def _InsertPayloadSignatures(self, signatures):
    """Put payload signatures into the payload they sign.

    Args:
      signatures: List of signatures for the payload.
    """
    logging.info('Inserting payload signatures into %s.',
                 self.signed_payload_file)

    # One temp file per signature; delta_generator takes their names as a
    # colon-separated list.
    signature_files = [utils.CreateTempFileWithContents(s) for s in signatures]
    signature_file_names = [f.name for f in signature_files]

    cmd = ['delta_generator',
           '-in_file', self.payload_file,
           '-signature_file', ':'.join(signature_file_names),
           '-out_file', self.signed_payload_file]

    self._RunGeneratorCmd(cmd)

    # NOTE(review): the loop body is elided from this view (presumably
    # closing each temp file) — confirm against the full file.
    for f in signature_files:
  def _StoreMetadataSignatures(self, signatures):
    """Store metadata signatures related to the payload.

    Our current format for saving metadata signatures only supports a single
    signature at this time.

    Args:
      signatures: A list of metadata signatures in binary string format.
    """
    if len(signatures) != 1:
      # NOTE(review): the second argument line of this call is elided from
      # this view — confirm against the full file.
      self._GenerateSignerResultsError(
          'Received %d metadata signatures, only a single signature supported.',

    logging.info('Saving metadata signatures in %s.',
                 self.metadata_signature_file)

    # The signature is stored base64-encoded.
    encoded_signature = base64.b64encode(signatures[0])

    with open(self.metadata_signature_file, 'w+') as f:
      f.write(encoded_signature)
  def _StoreDeltaLog(self, delta_log):
    """Store delta log related to the payload.

    Write out the delta log to a known file name. Mostly in its own function
    to simplify unittest mocks.

    Args:
      delta_log: The delta logs as a single string.
    """
    # NOTE(review): the write call inside this `with` is elided from this
    # view — confirm against the full file.
    with open(self.delta_log_file, 'w+') as f:
  def _SignPayload(self):
    """Wrap all the steps for signing an existing payload."""
    # Create hashes to sign.
    payload_hash = self._GenPayloadHash()
    metadata_hash = self._GenMetadataHash()

    # Sign both hashes in a single signer request; the signer returns one
    # list of signatures per hash (see _SignHashes).
    signatures = self._SignHashes([payload_hash, metadata_hash])

    # Split them back up. A list of signatures per hash.
    payload_signatures, metadata_signatures = signatures

    # Insert payload signature(s).
    self._InsertPayloadSignatures(payload_signatures)

    # Store Metadata signature(s).
    self._StoreMetadataSignatures(metadata_signatures)
    # NOTE(review): the enclosing `def` line for this method is elided from
    # this view — confirm its exact name/signature against the full file.
    """Create a given payload, if it doesn't already exist."""

    logging.info('Generating %s payload %s',
                 'delta' if self.payload.src_image else 'full', self.payload)

    # Fetch and extract the delta generator.
    self._PrepareGenerator()

    # Fetch and prepare the tgt image.
    self._PrepareImage(self.payload.tgt_image, self.tgt_image_file)

    # Fetch and prepare the src image.
    if self.payload.src_image:
      self._PrepareImage(self.payload.src_image, self.src_image_file)

    # Generate the unsigned payload.
    self._GenerateUnsignedPayload()

    # Sign the payload, if needed.
    # NOTE(review): the signing call (presumably guarded by the sign flag and
    # invoking self._SignPayload) is elided from this view.
  def _CheckPayloadIntegrity(self, payload, is_delta, metadata_sig_file_name):
    """Checks the integrity of a generated payload.

    Args:
      payload: an pre-initialized update_payload.Payload object.
      is_delta: whether or not this is a delta payload (Boolean).
      metadata_sig_file_name: metadata signature file.

    Raises:
      PayloadVerificationError: when an error is encountered.
    """
    logging.info('Checking payload integrity')
    with utils.CheckedOpen(metadata_sig_file_name) as metadata_sig_file:
      # NOTE(review): the `try:` that pairs with the except clause below is
      # elided from this view — confirm against the full file.
        # TODO(garnold)(chromium:243559) partition sizes should be embedded in
        # the payload; ditch the default values once it's done.
        # TODO(garnold)(chromium:261417) this disables the check for unmoved
        # blocks in MOVE sequences, which is an inefficiency but not
        # necessarily a problem. It should be re-enabled once the delta
        # generator can optimize away such cases.
        payload.Check(metadata_sig_file=metadata_sig_file,
                      assert_type=('delta' if is_delta else 'full'),
                      rootfs_part_size=self._DEFAULT_ROOTFS_PART_SIZE,
                      kernel_part_size=self._DEFAULT_KERNEL_PART_SIZE,
                      disabled_tests=['move-same-src-dst-block'])
      except update_payload.PayloadError as e:
        raise PayloadVerificationError(
            'Payload integrity check failed: %s' % e)
  def _ApplyPayload(self, payload, is_delta):
    """Applies a generated payload and verifies the result.

    Args:
      payload: an pre-initialized update_payload.Payload object.
      is_delta: whether or not this is a delta payload (Boolean).

    Raises:
      PayloadVerificationError: when an error occurs.
    """
    # NOTE(review): a few lines are elided from this view (extraction flags
    # in the cmd list, the part_files initialization, the src-image guard,
    # the raise argument, and the `try:` for the Apply call) — indentation
    # below is best-effort; confirm against the full file.
    # Extract the source/target kernel/rootfs partitions.
    # TODO(garnold)(chromium:243561) this is a redundant operation as the
    # partitions are already extracted (in some form) for the purpose of
    # payload generation. We should only do this once.
    cmd = ['cros_generate_update_payload',
           '--image', self.tgt_image_file]
    part_files['new_kernel_part'] = self._DEFAULT_NEW_KERN_PART
    part_files['new_rootfs_part'] = self._DEFAULT_NEW_ROOT_PART
      cmd += ['--src_image', self.src_image_file]
      part_files['old_kernel_part'] = self._DEFAULT_OLD_KERN_PART
      part_files['old_rootfs_part'] = self._DEFAULT_OLD_ROOT_PART
    self._RunGeneratorCmd(cmd)

    # Resolve the partition names to absolute paths and verify they exist.
    for part_name, part_file in part_files.items():
      part_file = os.path.join(self.generator_dir, part_file)
      if not os.path.isfile(part_file):
        raise PayloadVerificationError('Failed to extract partition (%s)' %
      part_files[part_name] = part_file

    # Apply the payload and verify the result; make sure to pass in the
    # explicit path to the bspatch binary in the au-generator directory (the
    # one we need to be using), and not to depend on PATH resolution etc. Also
    # note that we instruct the call to generate files with a .test suffix,
    # which we can later compare to the actual target partition (as it was
    # extracted from the target image above).
    logging.info('Applying %s payload and verifying result',
                 'delta' if is_delta else 'full')
    ref_new_kern_part = part_files['new_kernel_part']
    part_files['new_kernel_part'] += '.test'
    ref_new_root_part = part_files['new_rootfs_part']
    part_files['new_rootfs_part'] += '.test'
    bspatch_path = os.path.join(self.generator_dir, 'bspatch')
      payload.Apply(bspatch_path=bspatch_path, **part_files)
    except update_payload.PayloadError as e:
      raise PayloadVerificationError('Payload failed to apply: %s' % e)

    # Prior to comparing, remove unused space past the filesystem boundary
    # in the extracted target partitions.
    filelib.TruncateToSize(ref_new_kern_part,
                           os.path.getsize(part_files['new_kernel_part']))
    filelib.TruncateToSize(ref_new_root_part,
                           os.path.getsize(part_files['new_rootfs_part']))

    # Compare resulting partitions with the ones from the target image.
    if not filecmp.cmp(ref_new_kern_part, part_files['new_kernel_part']):
      raise PayloadVerificationError('Resulting kernel partition corrupted')
    if not filecmp.cmp(ref_new_root_part, part_files['new_rootfs_part']):
      raise PayloadVerificationError('Resulting rootfs partition corrupted')
  def _VerifyPayload(self):
    """Checks the integrity of the generated payload.

    Raises:
      PayloadVerificationError when the payload fails to verify.
    """
    # NOTE(review): the if/else choosing signed vs. unsigned file names
    # (presumably keyed on the sign flag) and the `try:` pairing with the
    # except below are elided from this view — confirm against the full file.
      payload_file_name = self.signed_payload_file
      metadata_sig_file_name = self.metadata_signature_file
      payload_file_name = self.payload_file
      metadata_sig_file_name = None

    with open(payload_file_name) as payload_file:
      payload = update_payload.Payload(payload_file)
      is_delta = bool(self.payload.src_image)

        # First, verify the payload's integrity.
        self._CheckPayloadIntegrity(payload, is_delta, metadata_sig_file_name)

        # Second, try to apply the payload and check the result.
        self._ApplyPayload(payload, is_delta)

      except update_payload.PayloadError as e:
        raise PayloadVerificationError('Payload failed to verify: %s' % e)
  def _UploadResults(self):
    """Copy the payload generation results to the specified destination."""
    # NOTE(review): the if/else selecting signed vs. unsigned upload
    # (presumably keyed on the sign flag) is elided from this view — confirm
    # against the full file.

    logging.info('Uploading payload to %s.', self.payload.uri)

    # Deliver the payload to the final location.
      urilib.Copy(self.signed_payload_file, self.payload.uri)
      urilib.Copy(self.metadata_signature_file,
                  self._MetadataUri(self.payload.uri))
      urilib.Copy(self.payload_file, self.payload.uri)

    # Upload delta generation log
    urilib.Copy(self.delta_log_file, self._DeltaLogsUri(self.payload.uri))
    # NOTE(review): the enclosing `def Run(self):` line is elided from this
    # view (callers invoke .Run(), see CreateAndUploadPayload below).
    """Create, verify and upload the results."""
    # Each step runs through the dry-run manager so a dry run skips the work.
    self._drm(self._Create)
    # NOTE(review): a guard line (presumably on the verify flag) is elided
    # before the next call — confirm against the full file.
    self._drm(self._VerifyPayload)
    self._drm(self._UploadResults)
def DefaultPayloadUri(payload, random_str=None):
  """Compute the default output URI for a payload.

  For a glob that matches all potential URIs for this
  payload, pass in a random_str of '*'.

  Args:
    payload: gspaths.Payload instance.
    random_str: A hook to force a specific random_str. None means generate it.

  Returns:
    Default URI for the payload.
  """
  # NOTE(review): the full-payload branch (presumably `else:` assigning
  # src_version = None) is elided from this view — confirm against the full
  # file.
  if payload.src_image:
    src_version = payload.src_image['version']

  if payload.tgt_image.get('image_type', 'signed') == 'signed':
    # Signed payload.
    return gspaths.ChromeosReleases.PayloadUri(
        channel=payload.tgt_image.channel,
        board=payload.tgt_image.board,
        version=payload.tgt_image.version,
        random_str=random_str,
        key=payload.tgt_image.key,
        image_channel=payload.tgt_image.image_channel,
        image_version=payload.tgt_image.image_version,
        src_version=src_version,
        bucket=payload.tgt_image.bucket)

  # Unsigned test payload.
  return gspaths.ChromeosReleases.PayloadUri(
      channel=payload.tgt_image.channel,
      board=payload.tgt_image.board,
      version=payload.tgt_image.version,
      random_str=random_str,
      src_version=src_version,
      bucket=payload.tgt_image.bucket)
def SetPayloadUri(payload, uri):
  """Sets (overrides) the URI in a payload object.

  Args:
    payload: gspaths.Payload instance.
    uri: A URI (string) to the payload file.
  """
  # NOTE(review): the body is elided from this view (presumably
  # `payload.uri = uri`) — confirm against the full file.
def FillInPayloadUri(payload, random_str=None):
  """Fill in default output URI for a payload if missing.

  Args:
    payload: gspaths.Payload instance.
    random_str: A hook to force a specific random_str. None means generate it.
  """
  default_uri = DefaultPayloadUri(payload, random_str)
  SetPayloadUri(payload, default_uri)
733 def _FilterNonPayloadUris(payload_uris):
734 """Filters out non-payloads from a list of GS URIs.
736 This essentially filters out known auxiliary artifacts whose names resemble /
737 derive from a respective payload name, such as files with .log and
738 .metadata-signature extensions.
741 payload_uris: a list of GS URIs (potentially) corresopnding to payloads
744 A filtered list of URIs.
746 return [uri for uri in payload_uris
747 if not (uri.endswith('.log') or uri.endswith('.metadata-signature'))]
def FindExistingPayloads(payload):
  """Look to see if any matching payloads already exist.

  Since payload names contain a random component, there can be multiple
  names for a given payload. This function lists all existing payloads
  that match the default URI for the given payload.

  Args:
    payload: gspaths.Payload instance.

  Returns:
    List of URIs for existing payloads that match the default payload pattern.
  """
  glob_uri = DefaultPayloadUri(payload, random_str='*')
  candidates = urilib.ListFiles(glob_uri)
  return _FilterNonPayloadUris(candidates)
def FindCacheDir(work_dir=None):
  """Helper for deciding what cache directory to use.

  Args:
    work_dir: Directory that contains ALL work files, cache will
      be created inside it, if present.

  Returns:
    Returns a directory suitable for use with a DownloadCache. Will
    always be consistent if a consistent work_dir is passed in.
  """
  # Discover which directory to use for caching.
  # Bug fix: both returns were unconditional, making the fallback unreachable
  # and crashing os.path.join(None, 'cache') when no work_dir was given;
  # guard on work_dir as the docstring describes.
  if work_dir:
    return os.path.join(work_dir, 'cache')

  # Shared default location when no work_dir was provided.
  return '/usr/local/google/payloads'
def CreateAndUploadPayload(payload, cache, work_dir, sign=True, verify=True,
                           dry_run=False, au_generator_uri=None):
  """Helper to create a PaygenPayloadLib instance and use it.

  Args:
    payload: An instance of utils.Payload describing the payload to generate.
    cache: An instance of DownloadCache for retrieving files.
    work_dir: A working directory that can hold scratch files. Will be cleaned
      up when done, and won't interfere with other users. None for /tmp.
    sign: Boolean saying if the payload should be signed (normally, you do).
    verify: whether the payload should be verified (default: True)
    dry_run: don't perform actual work
    au_generator_uri: URI to override standard au_generator.zip rules.
  """
  # Each invocation gets a private scratch directory, removed on exit.
  with osutils.TempDir(prefix='paygen_payload.', base_dir=work_dir) as scratch:
    logging.info('* Starting payload generation')
    started = datetime.datetime.now()

    worker = _PaygenPayload(payload, cache, scratch, sign, verify,
                            au_generator_uri, dry_run=dry_run)
    worker.Run()

    logging.info('* Finished payload generation in %s',
                 datetime.datetime.now() - started)