# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script is used to upload host prebuilts as well as board BINHOSTS.

Prebuilts are uploaded using gsutil to Google Storage. After these prebuilts
are successfully uploaded, a file is updated with the proper BINHOST version.

To read more about prebuilts/binhost binary packages please refer to:
http://goto/chromeos-prebuilts

Example of uploading prebuilt amd64 host files to Google Storage:
upload_prebuilts -p /b/cbuild/build -s -u gs://chromeos-prebuilt

Example of uploading x86-dogfood binhosts to Google Storage:
upload_prebuilts -b x86-dogfood -p /b/cbuild/build/ -u gs://chromeos-prebuilt -g
"""
from __future__ import print_function

import datetime
import functools
import multiprocessing
import os
import sys
import tempfile

from chromite.cbuildbot import commands
from chromite.cbuildbot import constants
from chromite.lib import binpkg
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import git
from chromite.lib import gs
from chromite.lib import osutils
from chromite.lib import parallel
from chromite.lib import portage_util
from chromite.lib import toolchain
41 # How many times to retry uploads.
44 # Multiplier for how long to sleep (in seconds) between retries; will delay
45 # (1*sleep) the first time, then (2*sleep), continuing via attempt * sleep.
48 _HOST_PACKAGES_PATH = 'chroot/var/lib/portage/pkgs'
49 _CATEGORIES_PATH = 'chroot/etc/portage/categories'
50 _PYM_PATH = 'chroot/usr/lib/portage/pym'
52 _BOARD_PATH = 'chroot/build/%(board)s'
53 _REL_BOARD_PATH = 'board/%(target)s/%(version)s'
54 _REL_HOST_PATH = 'host/%(host_arch)s/%(target)s/%(version)s'
55 # Private overlays to look at for builds to filter
56 # relative to build path
57 _PRIVATE_OVERLAY_DIR = 'src/private-overlays'
58 _GOOGLESTORAGE_ACL_FILE = 'googlestorage_acl.xml'
59 _GOOGLESTORAGE_GSUTIL_FILE = 'googlestorage_acl.txt'
60 _BINHOST_BASE_URL = 'gs://chromeos-prebuilt'
61 _PREBUILT_BASE_DIR = 'src/third_party/chromiumos-overlay/chromeos/config/'
62 # Created in the event of new host targets becoming available
63 _PREBUILT_MAKE_CONF = {'amd64': os.path.join(_PREBUILT_BASE_DIR,
64 'make.conf.amd64-host')}
class BuildTarget(object):
  """A board/variant/profile tuple.

  The string form is '<board>_<variant>' optionally suffixed with
  '_<profile>'; equality and hashing are defined on that string form so
  two targets compare equal iff they render identically.
  """

  def __init__(self, board_variant, profile=None):
    self.board_variant = board_variant
    # 'x86-alex_he' -> board 'x86-alex', variant 'he'; variant is '' when
    # there is no underscore.
    self.board, _, self.variant = board_variant.partition('_')
    self.profile = profile

  def __str__(self):
    if self.profile:
      return '%s_%s' % (self.board_variant, self.profile)
    else:
      return self.board_variant

  def __eq__(self, other):
    return str(other) == str(self)

  def __hash__(self):
    return hash(str(self))
def UpdateLocalFile(filename, value, key='PORTAGE_BINHOST'):
  """Update the key in file with the value passed.

  File format:
    key="value"
  Note quotes are added automatically

  Args:
    filename: Name of file to modify.
    value: Value to write with the key.
    key: The variable key to update. (Default: PORTAGE_BINHOST)
  """
  if os.path.exists(filename):
    file_fh = open(filename)
  else:
    # Create the file on first use so the loop below simply sees no lines.
    file_fh = open(filename, 'w+')
  file_lines = []
  found = False
  keyval_str = '%(key)s=%(value)s'
  for line in file_fh:
    # Strip newlines from end of line. We already add newlines below.
    line = line.rstrip("\n")

    if len(line.split('=')) != 2:
      # Skip any line that doesn't fit key=val.
      file_lines.append(line)
      continue

    file_var, file_val = line.split('=')
    if file_var == key:
      found = True
      print('Updating %s=%s to %s="%s"' % (file_var, file_val, key, value))
      value = '"%s"' % value
      file_lines.append(keyval_str % {'key': key, 'value': value})
    else:
      file_lines.append(keyval_str % {'key': file_var, 'value': file_val})

  if not found:
    # Key was not present; append it (quoted) at the end.
    value = '"%s"' % value
    file_lines.append(keyval_str % {'key': key, 'value': value})

  file_fh.close()
  # write out new file
  osutils.WriteFile(filename, '\n'.join(file_lines) + '\n')
def RevGitFile(filename, data, retries=5, dryrun=False):
  """Update and push the git file.

  Args:
    filename: file to modify that is in a git repo already
    data: A dict of key/values to update in |filename|
    retries: The number of times to retry before giving up, default: 5
    dryrun: If True, do not actually commit the change.
  """
  prebuilt_branch = 'prebuilt_branch'
  cwd = os.path.abspath(os.path.dirname(filename))
  # Remember the current commit so we can restore the checkout afterwards.
  commit = git.RunGit(cwd, ['rev-parse', 'HEAD']).output.rstrip()
  description = '%s: updating %s' % (os.path.basename(filename),
                                     ', '.join(data.keys()))
  # UpdateLocalFile will print out the keys/values for us.
  print('Revving git file %s' % filename)

  try:
    git.CreatePushBranch(prebuilt_branch, cwd)
    for key, value in data.iteritems():
      UpdateLocalFile(filename, value, key)
    git.RunGit(cwd, ['add', filename])
    git.RunGit(cwd, ['commit', '-m', description])
    git.PushWithRetry(prebuilt_branch, cwd, dryrun=dryrun, retries=retries)
  finally:
    # Always restore the original checkout, even if the push failed.
    git.RunGit(cwd, ['checkout', commit])
def GetVersion():
  """Get the version to put in LATEST and update the git version with."""
  return datetime.datetime.now().strftime('%Y.%m.%d.%H%M%S')
167 def _GsUpload(gs_context, acl, local_file, remote_file):
168 """Upload to GS bucket.
171 gs_context: A lib.gs.GSContext instance.
172 acl: The ACL to use for uploading the file.
173 local_file: The local file to be uploaded.
174 remote_file: The remote location to upload to.
177 Return the arg tuple of two if the upload failed
179 CANNED_ACLS = ['public-read', 'private', 'bucket-owner-read',
180 'authenticated-read', 'bucket-owner-full-control',
182 if acl in CANNED_ACLS:
183 gs_context.Copy(local_file, remote_file, acl=acl)
185 # For private uploads we assume that the overlay board is set up properly
186 # and a googlestore_acl.xml is present. Otherwise, this script errors.
187 gs_context.Copy(local_file, remote_file, acl='private')
188 if acl.endswith('.xml'):
189 # Apply the passed in ACL xml file to the uploaded object.
190 gs_context.SetACL(remote_file, acl=acl)
192 gs_context.ChangeACL(remote_file, acl_args_file=acl)
def RemoteUpload(gs_context, acl, files, pool=10):
  """Upload to google storage.

  Create a pool of process and call _GsUpload with the proper arguments.

  Args:
    gs_context: A lib.gs.GSContext instance.
    acl: The canned acl used for uploading. acl can be one of: "public-read",
      "public-read-write", "authenticated-read", "bucket-owner-read",
      "bucket-owner-full-control", or "private".
    files: dictionary with keys to local files and values to remote path.
    pool: integer of maximum proesses to have at the same time.
  """
  upload = functools.partial(_GsUpload, gs_context, acl)
  tasks = [[key, value] for key, value in files.iteritems()]
  parallel.RunTasksInProcessPool(upload, tasks, pool)
def GenerateUploadDict(base_local_path, base_remote_path, pkgs):
  """Build a dictionary of local remote file key pairs to upload.

  Args:
    base_local_path: The base path to the files on the local hard drive.
    base_remote_path: The base path to the remote paths.
    pkgs: The packages to upload.

  Returns:
    Returns a dictionary of local_path/remote_path pairs
  """
  upload_files = {}
  for pkg in pkgs:
    suffix = pkg['CPV'] + '.tbz2'
    local_path = os.path.join(base_local_path, suffix)
    # Every package listed must already exist on disk.
    assert os.path.exists(local_path)
    remote_path = '%s/%s' % (base_remote_path.rstrip('/'), suffix)
    upload_files[local_path] = remote_path

  return upload_files
def GetBoardOverlay(build_path, target):
  """Get the path to the board variant.

  Args:
    build_path: The path to the root of the build directory
    target: The target board as a BuildTarget object.

  Returns:
    The last overlay configured for the given board as a string.
  """
  board = target.board_variant
  overlays = portage_util.FindOverlays(constants.BOTH_OVERLAYS, board,
                                       buildroot=build_path)
  # We only care about the last entry.
  return overlays[-1]
def DeterminePrebuiltConfFile(build_path, target):
  """Determine the prebuilt.conf file that needs to be updated for prebuilts.

  Args:
    build_path: The path to the root of the build directory
    target: String representation of the board. This includes host and board
      targets

  Returns:
    A string path to a prebuilt.conf file to be updated.
  """
  if _HOST_ARCH == target:
    # We are a host target.
    # Without more examples of hosts this is a kludge for now.
    # TODO(Scottz): as new host targets come online expand this to
    # work more like boards.
    make_path = _PREBUILT_MAKE_CONF[target]
  else:
    # We are a board target: the prebuilt.conf lives in the board's overlay.
    board = GetBoardOverlay(build_path, target)
    make_path = os.path.join(board, 'prebuilt.conf')

  return make_path
def UpdateBinhostConfFile(path, key, value):
  """Update binhost config file file with key=value.

  Args:
    path: Filename to update.
    key: Key to update.
    value: New value for key.
  """
  cwd = os.path.dirname(os.path.abspath(path))
  filename = os.path.basename(path)
  osutils.SafeMakedirs(cwd)
  # Make sure the changes are committed on a push branch, creating one if the
  # checkout is currently detached.
  if not git.GetCurrentBranch(cwd):
    git.CreatePushBranch(constants.STABLE_EBUILD_BRANCH, cwd, sync=False)
  # Touch the file so UpdateLocalFile always has something to read.
  osutils.WriteFile(path, '', mode='a')
  UpdateLocalFile(path, value, key)
  git.RunGit(cwd, ['add', filename])
  description = '%s: updating %s' % (os.path.basename(filename), key)
  git.RunGit(cwd, ['commit', '-m', description])
def GenerateHtmlIndex(files, index, board, version):
  """Given the list of |files|, generate an index.html at |index|.

  Args:
    files: The list of files to link to.
    index: The path to the html index.
    board: Name of the board this index is for.
    version: Build version this index is for.
  """
  # NOTE(review): the extraction dropped most of this HTML template and the
  # extra navigation entries; reconstructed from the visible fragments —
  # confirm against upstream before relying on the exact markup.
  head = """<html>
<head>
 <title>Package Prebuilt Index: %(board)s / %(version)s</title>
</head>
<body>
<h2>Package Prebuilt Index: %(board)s / %(version)s</h2>"""
  head %= {
      'board': board,
      'version': version,
  }

  files = files + [
      '.|Google Storage Index',
      '..|',
  ]
  commands.GenerateHtmlIndex(index, files, head=head)
def _GrabAllRemotePackageIndexes(binhost_urls):
  """Grab all of the packages files associated with a list of binhost_urls.

  Args:
    binhost_urls: The URLs for the directories containing the Packages files we
      want to grab.

  Returns:
    A list of PackageIndex objects.
  """
  pkg_indexes = []
  for url in binhost_urls:
    pkg_index = binpkg.GrabRemotePackageIndex(url)
    # Skip URLs whose Packages file could not be fetched.
    if pkg_index:
      pkg_indexes.append(pkg_index)
  return pkg_indexes
class PrebuiltUploader(object):
  """Synchronize host and board prebuilts."""

  def __init__(self, upload_location, acl, binhost_base_url, pkg_indexes,
               build_path, packages, skip_upload, binhost_conf_dir, dryrun,
               target, slave_targets, version):
    """Constructor for prebuilt uploader object.

    This object can upload host or prebuilt files to Google Storage.

    Args:
      upload_location: The upload location.
      acl: The canned acl used for uploading to Google Storage. acl can be one
        of: "public-read", "public-read-write", "authenticated-read",
        "bucket-owner-read", "bucket-owner-full-control", "project-private",
        or "private" (see "gsutil help acls"). If we are not uploading to
        Google Storage, this parameter is unused.
      binhost_base_url: The URL used for downloading the prebuilts.
      pkg_indexes: Old uploaded prebuilts to compare against. Instead of
        uploading duplicate files, we just link to the old files.
      build_path: The path to the directory containing the chroot.
      packages: Packages to upload.
      skip_upload: Don't actually upload the tarballs.
      binhost_conf_dir: Directory where to store binhost.conf files.
      dryrun: Don't push or upload prebuilts.
      target: BuildTarget managed by this builder.
      slave_targets: List of BuildTargets managed by slave builders.
      version: A unique string, intended to be included in the upload path,
        which identifies the version number of the uploaded prebuilts.
    """
    self._upload_location = upload_location
    self._acl = acl
    self._binhost_base_url = binhost_base_url
    self._pkg_indexes = pkg_indexes
    self._build_path = build_path
    self._packages = set(packages)
    # Tracks category/package names seen while filtering, so we can warn
    # about --packages entries that never matched anything.
    self._found_packages = set()
    self._skip_upload = skip_upload
    self._binhost_conf_dir = binhost_conf_dir
    self._dryrun = dryrun
    self._target = target
    self._slave_targets = slave_targets
    self._version = version
    self._gs_context = gs.GSContext(retries=_RETRIES, sleep=_SLEEP_TIME,
                                    dry_run=self._dryrun)

  def _Upload(self, local_file, remote_file):
    """Wrapper around _GsUpload"""
    _GsUpload(self._gs_context, self._acl, local_file, remote_file)

  def _ShouldFilterPackage(self, pkg):
    """Return True if |pkg| should be excluded from the upload.

    Keeps everything when no --packages filter was given; otherwise keeps a
    package when either its bare name or its category/name is listed.
    """
    if not self._packages:
      return False
    pym_path = os.path.abspath(os.path.join(self._build_path, _PYM_PATH))
    sys.path.insert(0, pym_path)
    # Import portage from inside the chroot, not from the host system.
    # pylint: disable=F0401
    import portage.versions
    cat, pkgname = portage.versions.catpkgsplit(pkg['CPV'])[0:2]
    cp = '%s/%s' % (cat, pkgname)
    self._found_packages.add(cp)
    return pkgname not in self._packages and cp not in self._packages

  def _UploadPrebuilt(self, package_path, url_suffix):
    """Upload host or board prebuilt files to Google Storage space.

    Args:
      package_path: The path to the packages dir.
      url_suffix: The remote subdirectory where we should upload the packages.
    """
    # Process Packages file, removing duplicates and filtered packages.
    pkg_index = binpkg.GrabLocalPackageIndex(package_path)
    pkg_index.SetUploadLocation(self._binhost_base_url, url_suffix)
    pkg_index.RemoveFilteredPackages(self._ShouldFilterPackage)
    uploads = pkg_index.ResolveDuplicateUploads(self._pkg_indexes)
    unmatched_pkgs = self._packages - self._found_packages
    if unmatched_pkgs:
      cros_build_lib.Warning('unable to match packages: %r' % unmatched_pkgs)

    # Write Packages file.
    tmp_packages_file = pkg_index.WriteToNamedTemporaryFile()

    remote_location = '%s/%s' % (self._upload_location.rstrip('/'), url_suffix)
    assert remote_location.startswith('gs://')

    # Build list of files to upload.
    upload_files = GenerateUploadDict(package_path, remote_location, uploads)
    remote_file = '%s/Packages' % remote_location.rstrip('/')
    upload_files[tmp_packages_file.name] = remote_file

    RemoteUpload(self._gs_context, self._acl, upload_files)

    with tempfile.NamedTemporaryFile(
        prefix='chromite.upload_prebuilts.index.') as index:
      GenerateHtmlIndex(
          [x[len(remote_location) + 1:] for x in upload_files.values()],
          index.name, self._target, self._version)
      self._Upload(index.name, '%s/index.html' % remote_location.rstrip('/'))

      link_name = 'Prebuilts[%s]: %s' % (self._target, self._version)
      url = '%s%s/index.html' % (gs.PUBLIC_BASE_HTTPS_URL,
                                 remote_location[len(gs.BASE_GS_URL):])
      cros_build_lib.PrintBuildbotLink(link_name, url)

  def _UploadSdkTarball(self, board_path, url_suffix, prepackaged,
                        toolchain_tarballs, toolchain_upload_path):
    """Upload a tarball of the sdk at the specified path to Google Storage.

    Args:
      board_path: The path to the board dir.
      url_suffix: The remote subdirectory where we should upload the packages.
      prepackaged: If given, a tarball that has been packaged outside of this
        script and should be used.
      toolchain_tarballs: List of toolchain tarballs to upload.
      toolchain_upload_path: Path under the bucket to place toolchain tarballs.
    """
    remote_location = '%s/%s' % (self._upload_location.rstrip('/'), url_suffix)
    assert remote_location.startswith('gs://')
    boardname = os.path.basename(board_path.rstrip('/'))
    # We do not upload non SDK board tarballs,
    assert boardname == constants.CHROOT_BUILDER_BOARD
    assert prepackaged is not None

    version_str = self._version[len('chroot-'):]
    remote_tarfile = toolchain.GetSdkURL(
        for_gsutil=True, suburl='cros-sdk-%s.tar.xz' % (version_str,))
    # For SDK, also upload the manifest which is guaranteed to exist
    # by the builderstage.
    self._Upload(prepackaged + '.Manifest', remote_tarfile + '.Manifest')
    self._Upload(prepackaged, remote_tarfile)

    # Post the toolchain tarballs too.
    for tarball in toolchain_tarballs:
      target, local_path = tarball.split(':')
      suburl = toolchain_upload_path % {'target': target}
      remote_path = toolchain.GetSdkURL(for_gsutil=True, suburl=suburl)
      self._Upload(local_path, remote_path)

    # Finally, also update the pointer to the latest SDK on which polling
    # services subscribe.
    with osutils.TempDir() as tmpdir:
      pointerfile = os.path.join(tmpdir, 'cros-sdk-latest.conf')
      remote_pointerfile = toolchain.GetSdkURL(for_gsutil=True,
                                               suburl='cros-sdk-latest.conf')
      osutils.WriteFile(pointerfile, 'LATEST_SDK="%s"' % version_str)
      self._Upload(pointerfile, remote_pointerfile)

  def _GetTargets(self):
    """Retuns the list of targets to use."""
    targets = self._slave_targets[:]
    if self._target:
      targets.append(self._target)

    return targets

  def SyncHostPrebuilts(self, key, git_sync, sync_binhost_conf):
    """Synchronize host prebuilt files.

    This function will sync both the standard host packages, plus the host
    packages associated with all targets that have been "setup" with the
    current host's chroot. For instance, if this host has been used to build
    x86-generic, it will sync the host packages associated with
    'i686-pc-linux-gnu'. If this host has also been used to build arm-generic,
    it will also sync the host packages associated with
    'armv7a-cros-linux-gnueabi'.

    Args:
      key: The variable key to update in the git file.
      git_sync: If set, update make.conf of target to reference the latest
        prebuilt packages generated here.
      sync_binhost_conf: If set, update binhost config file in
        chromiumos-overlay for the host.
    """
    # Slave boards are listed before the master board so that the master board
    # takes priority (i.e. x86-generic preflight host prebuilts takes priority
    # over preflight host prebuilts from other builders.)
    binhost_urls = []
    for target in self._GetTargets():
      url_suffix = _REL_HOST_PATH % {'version': self._version,
                                     'host_arch': _HOST_ARCH,
                                     'target': target}
      packages_url_suffix = '%s/packages' % url_suffix.rstrip('/')

      if self._target == target and not self._skip_upload:
        # Upload prebuilts.
        package_path = os.path.join(self._build_path, _HOST_PACKAGES_PATH)
        self._UploadPrebuilt(package_path, packages_url_suffix)

      # Record URL where prebuilts were uploaded.
      binhost_urls.append('%s/%s/' % (self._binhost_base_url.rstrip('/'),
                                      packages_url_suffix.rstrip('/')))

    binhost = ' '.join(binhost_urls)
    if git_sync:
      git_file = os.path.join(self._build_path,
                              _PREBUILT_MAKE_CONF[_HOST_ARCH])
      RevGitFile(git_file, {key: binhost}, dryrun=self._dryrun)
    if sync_binhost_conf:
      binhost_conf = os.path.join(self._build_path, self._binhost_conf_dir,
                                  'host', '%s-%s.conf' % (_HOST_ARCH, key))
      UpdateBinhostConfFile(binhost_conf, key, binhost)

  def SyncBoardPrebuilts(self, key, git_sync, sync_binhost_conf,
                         upload_board_tarball, prepackaged_board,
                         toolchain_tarballs, toolchain_upload_path):
    """Synchronize board prebuilt files.

    Args:
      key: The variable key to update in the git file.
      git_sync: If set, update make.conf of target to reference the latest
        prebuilt packages generated here.
      sync_binhost_conf: If set, update binhost config file in
        chromiumos-overlay for the current board.
      upload_board_tarball: Include a tarball of the board in our upload.
      prepackaged_board: A tarball of the board built outside of this script.
      toolchain_tarballs: A list of toolchain tarballs to upload.
      toolchain_upload_path: Path under the bucket to place toolchain tarballs.
    """
    for target in self._GetTargets():
      board_path = os.path.join(self._build_path,
                                _BOARD_PATH % {'board': target.board_variant})
      package_path = os.path.join(board_path, 'packages')
      url_suffix = _REL_BOARD_PATH % {'target': target,
                                      'version': self._version}
      packages_url_suffix = '%s/packages' % url_suffix.rstrip('/')

      # Process the target board differently if it is the main --board.
      if self._target == target and not self._skip_upload:
        # This strips "chroot" prefix because that is sometimes added as the
        # --prepend-version argument (e.g. by chromiumos-sdk bot).
        # TODO(build): Clean it up to be less hard-coded.
        version_str = self._version[len('chroot-'):]

        # Upload board tarballs in the background.
        if upload_board_tarball:
          if toolchain_upload_path:
            toolchain_upload_path %= {'version': version_str}
          tar_process = multiprocessing.Process(
              target=self._UploadSdkTarball,
              args=(board_path, url_suffix, prepackaged_board,
                    toolchain_tarballs, toolchain_upload_path))
          tar_process.start()

        # Upload prebuilts.
        self._UploadPrebuilt(package_path, packages_url_suffix)

        # Make sure we finished uploading the board tarballs.
        if upload_board_tarball:
          tar_process.join()
          assert tar_process.exitcode == 0
          # TODO(zbehan): This should be done cleaner.
          if target.board == constants.CHROOT_BUILDER_BOARD:
            sdk_conf = os.path.join(self._build_path, self._binhost_conf_dir,
                                    'host/sdk_version.conf')
            sdk_settings = {
                'SDK_LATEST_VERSION': version_str,
                'TC_PATH': toolchain_upload_path,
            }
            RevGitFile(sdk_conf, sdk_settings, dryrun=self._dryrun)

      # Record URL where prebuilts were uploaded.
      url_value = '%s/%s/' % (self._binhost_base_url.rstrip('/'),
                              packages_url_suffix.rstrip('/'))

      if git_sync:
        git_file = DeterminePrebuiltConfFile(self._build_path, target)
        RevGitFile(git_file, {key: url_value}, dryrun=self._dryrun)

      if sync_binhost_conf:
        # Update the binhost configuration file in git.
        binhost_conf = os.path.join(self._build_path, self._binhost_conf_dir,
                                    'target', '%s-%s.conf' % (target, key))
        UpdateBinhostConfFile(binhost_conf, key, url_value)
def _AddSlaveBoard(_option, _opt_str, value, parser):
  """Callback that adds a slave board to the list of slave targets."""
  parser.values.slave_targets.append(BuildTarget(value))
624 def _AddSlaveProfile(_option, _opt_str, value, parser):
625 """Callback that adds a slave profile to the list of slave targets."""
626 if not parser.values.slave_targets:
627 parser.error('Must specify --slave-board before --slave-profile')
628 if parser.values.slave_targets[-1].profile is not None:
629 parser.error('Cannot specify --slave-profile twice for same board')
630 parser.values.slave_targets[-1].profile = value
def ParseOptions(argv):
  """Returns options given by the user and the target specified.

  Args:
    argv: The args to parse.

  Returns:
    A tuple containing a parsed options object and BuildTarget.
    The target instance is None if no board is specified.
  """
  # NOTE(review): several 'default=' continuation lines and the final
  # validation scaffolding were dropped by the extraction; defaults of
  # None/False below are reconstructed — confirm against upstream.
  parser = commandline.OptionParser()
  parser.add_option('-H', '--binhost-base-url', dest='binhost_base_url',
                    default=_BINHOST_BASE_URL,
                    help='Base URL to use for binhost in make.conf updates')
  parser.add_option('', '--previous-binhost-url', action='append',
                    default=[], dest='previous_binhost_url',
                    help='Previous binhost URL')
  parser.add_option('-b', '--board', dest='board', default=None,
                    help='Board type that was built on this machine')
  parser.add_option('-B', '--prepackaged-tarball', dest='prepackaged_tarball',
                    default=None,
                    help='Board tarball prebuilt outside of this script.')
  parser.add_option('--toolchain-tarball', dest='toolchain_tarballs',
                    action='append', default=[],
                    help='Redistributable toolchain tarball.')
  parser.add_option('--toolchain-upload-path', default='',
                    help='Path to place toolchain tarballs in the sdk tree.')
  parser.add_option('', '--profile', dest='profile', default=None,
                    help='Profile that was built on this machine')
  parser.add_option('', '--slave-board', default=[], action='callback',
                    dest='slave_targets', type='string',
                    callback=_AddSlaveBoard,
                    help='Board type that was built on a slave machine. To '
                         'add a profile to this board, use --slave-profile.')
  parser.add_option('', '--slave-profile', action='callback', type='string',
                    callback=_AddSlaveProfile,
                    help='Board profile that was built on a slave machine. '
                         'Applies to previous slave board.')
  parser.add_option('-p', '--build-path', dest='build_path',
                    help='Path to the directory containing the chroot')
  parser.add_option('', '--packages', action='append',
                    default=[], dest='packages',
                    help='Only include the specified packages. '
                         '(Default is to include all packages.)')
  parser.add_option('-s', '--sync-host', dest='sync_host',
                    default=False, action='store_true',
                    help='Sync host prebuilts')
  parser.add_option('-g', '--git-sync', dest='git_sync',
                    default=False, action='store_true',
                    help='Enable git version sync (This commits to a repo.) '
                         'This is used by full builders to commit directly '
                         'to board overlays.')
  parser.add_option('-u', '--upload', dest='upload',
                    default=None,
                    help='Upload location')
  parser.add_option('-V', '--prepend-version', dest='prepend_version',
                    default=None,
                    help='Add an identifier to the front of the version')
  parser.add_option('-f', '--filters', dest='filters', action='store_true',
                    default=False,
                    help='Turn on filtering of private ebuild packages')
  parser.add_option('-k', '--key', dest='key',
                    default='PORTAGE_BINHOST',
                    help='Key to update in make.conf / binhost.conf')
  parser.add_option('', '--set-version', dest='set_version',
                    default=None,
                    help='Specify the version string')
  parser.add_option('', '--sync-binhost-conf', dest='sync_binhost_conf',
                    default=False, action='store_true',
                    help='Update binhost.conf in chromiumos-overlay or '
                         'chromeos-overlay. Commit the changes, but don\'t '
                         'push them. This is used for preflight binhosts.')
  parser.add_option('', '--binhost-conf-dir', dest='binhost_conf_dir',
                    help='Directory to commit binhost config with '
                         '--sync-binhost-conf.')
  parser.add_option('-P', '--private', dest='private', action='store_true',
                    default=False, help='Mark gs:// uploads as private.')
  parser.add_option('', '--skip-upload', dest='skip_upload',
                    action='store_true', default=False,
                    help='Skip upload step.')
  parser.add_option('', '--upload-board-tarball', dest='upload_board_tarball',
                    action='store_true', default=False,
                    help='Upload board tarball to Google Storage.')
  parser.add_option('-n', '--dry-run', dest='dryrun',
                    action='store_true', default=False,
                    help='Don\'t push or upload prebuilts.')

  options, args = parser.parse_args(argv)
  if not options.build_path:
    parser.error('you need provide a chroot path')
  if not options.upload and not options.skip_upload:
    parser.error('you need to provide an upload location using -u')
  if not options.set_version and options.skip_upload:
    parser.error('If you are using --skip-upload, you must specify a '
                 'version number using --set-version.')
  if args:
    parser.error('invalid arguments passed to upload_prebuilts: %r' % args)

  target = None
  if options.board:
    target = BuildTarget(options.board, options.profile)

  if target in options.slave_targets:
    parser.error('--board/--profile must not also be a slave target.')

  if len(set(options.slave_targets)) != len(options.slave_targets):
    parser.error('--slave-boards must not have duplicates.')

  if options.slave_targets and options.git_sync:
    parser.error('--slave-boards is not compatible with --git-sync')

  if (options.upload_board_tarball and options.skip_upload and
      options.board == 'amd64-host'):
    parser.error('--skip-upload is not compatible with '
                 '--upload-board-tarball and --board=amd64-host')

  if (options.upload_board_tarball and not options.skip_upload and
      not options.upload.startswith('gs://')):
    parser.error('--upload-board-tarball only works with gs:// URLs.\n'
                 '--upload must be a gs:// URL.')

  if options.upload_board_tarball and options.prepackaged_tarball is None:
    parser.error('--upload-board-tarball requires --prepackaged-tarball')

  if options.private:
    if options.sync_host:
      parser.error('--private and --sync-host/-s cannot be specified '
                   'together; we do not support private host prebuilts')

    if not options.upload or not options.upload.startswith('gs://'):
      parser.error('--private is only valid for gs:// URLs; '
                   '--upload must be a gs:// URL.')

    if options.binhost_base_url != _BINHOST_BASE_URL:
      parser.error('when using --private the --binhost-base-url '
                   'is automatically derived.')

  if options.sync_binhost_conf and not options.binhost_conf_dir:
    parser.error('--sync-binhost-conf requires --binhost-conf-dir')

  return options, target
def main(argv):
  """Entry point: parse options, then sync host and/or board prebuilts.

  NOTE(review): the 'def main' line, umask call, and the private-ACL
  scaffolding were dropped by the extraction; reconstructed from the visible
  statements — confirm against upstream.
  """
  # Set umask to a sane value so that files created as root are readable.
  os.umask(0o22)

  options, target = ParseOptions(argv)

  # Calculate a list of Packages index files to compare against. Whenever we
  # upload a package, we check to make sure it's not already stored in one of
  # the packages files we uploaded. This list of packages files might contain
  # both board and host packages.
  pkg_indexes = _GrabAllRemotePackageIndexes(options.previous_binhost_url)

  if options.set_version:
    version = options.set_version
  else:
    version = GetVersion()

  if options.prepend_version:
    version = '%s-%s' % (options.prepend_version, version)

  acl = 'public-read'
  binhost_base_url = options.binhost_base_url

  if options.private:
    binhost_base_url = options.upload
    if target:
      board_path = GetBoardOverlay(options.build_path, target)
      # Use the gsutil acl ch argument file if it exists, or fall back to the
      # acl xml file.
      acl = os.path.join(board_path, _GOOGLESTORAGE_GSUTIL_FILE)
      if not os.path.isfile(acl):
        acl = os.path.join(board_path, _GOOGLESTORAGE_ACL_FILE)

  uploader = PrebuiltUploader(options.upload, acl, binhost_base_url,
                              pkg_indexes, options.build_path,
                              options.packages, options.skip_upload,
                              options.binhost_conf_dir, options.dryrun,
                              target, options.slave_targets, version)

  if options.sync_host:
    uploader.SyncHostPrebuilts(options.key, options.git_sync,
                               options.sync_binhost_conf)

  if options.board or options.slave_targets:
    uploader.SyncBoardPrebuilts(options.key, options.git_sync,
                                options.sync_binhost_conf,
                                options.upload_board_tarball,
                                options.prepackaged_tarball,
                                options.toolchain_tarballs,
                                options.toolchain_upload_path)