# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""This script is used to upload host prebuilts as well as board BINHOSTS.

Prebuilts are uploaded using gsutil to Google Storage. After these prebuilts
are successfully uploaded, a file is updated with the proper BINHOST version.

To read more about prebuilts/binhost binary packages please refer to:
http://goto/chromeos-prebuilts

Example of uploading prebuilt amd64 host files to Google Storage:
upload_prebuilts -p /b/cbuild/build -s -u gs://chromeos-prebuilt

Example of uploading x86-dogfood binhosts to Google Storage:
upload_prebuilts -b x86-dogfood -p /b/cbuild/build/ -u gs://chromeos-prebuilt -g
"""
import datetime
import multiprocessing
import optparse
import os
import sys

from chromite.buildbot import constants
from chromite.buildbot import portage_utilities
from chromite.lib import binpkg
from chromite.lib import cros_build_lib
from chromite.lib import git
from chromite.lib import gs
from chromite.lib import osutils
from chromite.lib import parallel
from chromite.lib import toolchain
37 # How many times to retry uploads.
40 # Multiplier for how long to sleep (in seconds) between retries; will delay
41 # (1*sleep) the first time, then (2*sleep), continuing via attempt * sleep.
44 _HOST_PACKAGES_PATH = 'chroot/var/lib/portage/pkgs'
45 _CATEGORIES_PATH = 'chroot/etc/portage/categories'
46 _PYM_PATH = 'chroot/usr/lib/portage/pym'
48 _BOARD_PATH = 'chroot/build/%(board)s'
49 _REL_BOARD_PATH = 'board/%(target)s/%(version)s'
50 _REL_HOST_PATH = 'host/%(host_arch)s/%(target)s/%(version)s'
51 # Private overlays to look at for builds to filter
52 # relative to build path
53 _PRIVATE_OVERLAY_DIR = 'src/private-overlays'
54 _GOOGLESTORAGE_ACL_FILE = 'googlestorage_acl.xml'
55 _BINHOST_BASE_URL = 'gs://chromeos-prebuilt'
56 _PREBUILT_BASE_DIR = 'src/third_party/chromiumos-overlay/chromeos/config/'
57 # Created in the event of new host targets becoming available
58 _PREBUILT_MAKE_CONF = {'amd64': os.path.join(_PREBUILT_BASE_DIR,
59 'make.conf.amd64-host')}
60 _BINHOST_CONF_DIR = 'src/third_party/chromiumos-overlay/chromeos/binhost'
class BuildTarget(object):
  """A board/variant/profile tuple.

  Equality and hashing are defined on the string form so that targets can be
  de-duplicated in sets and compared against plain strings.
  """

  def __init__(self, board_variant, profile=None):
    self.board_variant = board_variant
    # Split at the first underscore: 'x86-alex_he' -> ('x86-alex', 'he').
    self.board, _, self.variant = board_variant.partition('_')
    self.profile = profile

  def __str__(self):
    if self.profile:
      return '%s_%s' % (self.board_variant, self.profile)
    else:
      return self.board_variant

  def __eq__(self, other):
    # Compare by string form so a BuildTarget equals its own str().
    return str(other) == str(self)

  def __hash__(self):
    return hash(str(self))
def UpdateLocalFile(filename, value, key='PORTAGE_BINHOST'):
  """Update the key in file with the value passed.

  File format:
    key="value"
  Note quotes are added automatically

  Args:
    filename: Name of file to modify.
    value: Value to write with the key.
    key: The variable key to update. (Default: PORTAGE_BINHOST)
  """
  if os.path.exists(filename):
    file_fh = open(filename)
  else:
    # Create the file on first use so the rewrite below has something to read.
    file_fh = open(filename, 'w+')
  file_lines = []
  found = False
  keyval_str = '%(key)s=%(value)s'
  for line in file_fh:
    # Strip newlines from end of line. We already add newlines below.
    line = line.rstrip("\n")

    if len(line.split('=')) != 2:
      # Skip any line that doesn't fit key=val.
      file_lines.append(line)
      continue

    file_var, file_val = line.split('=')
    if file_var == key:
      found = True
      print('Updating %s=%s to %s="%s"' % (file_var, file_val, key, value))
      value = '"%s"' % value
      file_lines.append(keyval_str % {'key': key, 'value': value})
    else:
      # Preserve unrelated key=val lines untouched.
      file_lines.append(keyval_str % {'key': file_var, 'value': file_val})

  if not found:
    # Key was not present; append it at the end of the file.
    value = '"%s"' % value
    file_lines.append(keyval_str % {'key': key, 'value': value})

  file_fh.close()
  # write out new file
  osutils.WriteFile(filename, '\n'.join(file_lines) + '\n')
def RevGitFile(filename, data, retries=5, dryrun=False):
  """Update and push the git file.

  Args:
    filename: file to modify that is in a git repo already
    data: A dict of key/values to update in |filename|
    retries: The number of times to retry before giving up, default: 5
    dryrun: If True, do not actually commit the change.
  """
  prebuilt_branch = 'prebuilt_branch'
  cwd = os.path.abspath(os.path.dirname(filename))
  # Remember the current commit so we can restore the checkout afterwards.
  commit = git.RunGit(cwd, ['rev-parse', 'HEAD']).output.rstrip()
  description = '%s: updating %s' % (os.path.basename(filename),
                                     ', '.join(data.keys()))
  # UpdateLocalFile will print out the keys/values for us.
  print('Revving git file %s' % filename)

  try:
    git.CreatePushBranch(prebuilt_branch, cwd)
    for key, value in data.items():
      UpdateLocalFile(filename, value, key)
    git.RunGit(cwd, ['add', filename])
    git.RunGit(cwd, ['commit', '-m', description])
    git.PushWithRetry(prebuilt_branch, cwd, dryrun=dryrun, retries=retries)
  finally:
    # Always leave the repo back on the original commit, even if push failed.
    git.RunGit(cwd, ['checkout', commit])
159 """Get the version to put in LATEST and update the git version with."""
160 return datetime.datetime.now().strftime('%Y.%m.%d.%H%M%S')
def _GsUpload(local_file, remote_file, acl):
  """Upload to GS bucket.

  Args:
    local_file: The local file to be uploaded.
    remote_file: The remote location to upload to.
    acl: The ACL to use for uploading the file.

  Returns:
    Return the arg tuple of two if the upload failed
  """
  # ACL names gsutil understands directly; anything else is treated as a path
  # to an XML ACL file that is applied after a private upload.
  CANNED_ACLS = ['public-read', 'private', 'bucket-owner-read',
                 'authenticated-read', 'bucket-owner-full-control',
                 'public-read-write']
  gs_context = gs.GSContext(retries=_RETRIES, sleep=_SLEEP_TIME)
  if acl in CANNED_ACLS:
    gs_context.Copy(local_file, remote_file, acl=acl)
  else:
    # For private uploads we assume that the overlay board is set up properly
    # and a googlestore_acl.xml is present. Otherwise, this script errors.
    gs_context.Copy(local_file, remote_file, acl='private')
    # Apply the passed in ACL xml file to the uploaded object.
    gs_context.SetACL(remote_file, acl=acl)
def RemoteUpload(acl, files, pool=10):
  """Upload to google storage.

  Create a pool of process and call _GsUpload with the proper arguments.

  Args:
    acl: The canned acl used for uploading. acl can be one of: "public-read",
         "public-read-write", "authenticated-read", "bucket-owner-read",
         "bucket-owner-full-control", or "private".
    files: dictionary with keys to local files and values to remote path.
    pool: integer of maximum processes to have at the same time.

  Returns:
    Return a set of tuple arguments of the failed uploads
  """
  tasks = [[key, value, acl] for key, value in files.items()]
  parallel.RunTasksInProcessPool(_GsUpload, tasks, pool)
def GenerateUploadDict(base_local_path, base_remote_path, pkgs):
  """Build a dictionary of local remote file key pairs to upload.

  Args:
    base_local_path: The base path to the files on the local hard drive.
    base_remote_path: The base path to the remote paths.
    pkgs: The packages to upload.

  Returns:
    Returns a dictionary of local_path/remote_path pairs
  """
  upload_files = {}
  for pkg in pkgs:
    suffix = pkg['CPV'] + '.tbz2'
    local_path = os.path.join(base_local_path, suffix)
    # Every package listed in the index must exist on disk.
    assert os.path.exists(local_path)
    remote_path = '%s/%s' % (base_remote_path.rstrip('/'), suffix)
    upload_files[local_path] = remote_path
  return upload_files
def GetBoardOverlay(build_path, target):
  """Get the path to the board variant.

  Args:
    build_path: The path to the root of the build directory
    target: The target board as a BuildTarget object.

  Returns:
    The last overlay configured for the given board as a string.
  """
  board = target.board_variant
  overlays = portage_utilities.FindOverlays(constants.BOTH_OVERLAYS, board,
                                            buildroot=build_path)
  # We only care about the last entry.
  return overlays[-1]
def DeterminePrebuiltConfFile(build_path, target):
  """Determine the prebuilt.conf file that needs to be updated for prebuilts.

  Args:
    build_path: The path to the root of the build directory
    target: String representation of the board. This includes host and board
      targets

  Returns:
    A string path to a prebuilt.conf file to be updated.
  """
  if _HOST_ARCH == target:
    # We are host.
    # Without more examples of hosts this is a kludge for now.
    # TODO(Scottz): as new host targets come online expand this to
    # work more like boards.
    make_path = _PREBUILT_MAKE_CONF[target]
  else:
    # We are a board; the config lives in the board's overlay.
    board = GetBoardOverlay(build_path, target)
    make_path = os.path.join(board, 'prebuilt.conf')

  return make_path
def UpdateBinhostConfFile(path, key, value):
  """Update binhost config file with key=value.

  Args:
    path: Filename to update.
    key: Key to update.
    value: New value for key.
  """
  cwd = os.path.dirname(os.path.abspath(path))
  filename = os.path.basename(path)
  osutils.SafeMakedirs(cwd)
  # Commit onto a push branch; create one if the checkout is detached.
  if not git.GetCurrentBranch(cwd):
    git.CreatePushBranch(constants.STABLE_EBUILD_BRANCH, cwd, sync=False)
  # Touch the file so UpdateLocalFile always has something to rewrite.
  osutils.WriteFile(path, '', mode='a')
  UpdateLocalFile(path, value, key)
  git.RunGit(cwd, ['add', filename])
  description = '%s: updating %s' % (os.path.basename(filename), key)
  git.RunGit(cwd, ['commit', '-m', description])
289 def _GrabAllRemotePackageIndexes(binhost_urls):
290 """Grab all of the packages files associated with a list of binhost_urls.
293 binhost_urls: The URLs for the directories containing the Packages files we
297 A list of PackageIndex objects.
300 for url in binhost_urls:
301 pkg_index = binpkg.GrabRemotePackageIndex(url)
303 pkg_indexes.append(pkg_index)
class PrebuiltUploader(object):
  """Synchronize host and board prebuilts."""

  def __init__(self, upload_location, acl, binhost_base_url,
               pkg_indexes, build_path, packages, skip_upload,
               binhost_conf_dir, debug, target, slave_targets):
    """Constructor for prebuilt uploader object.

    This object can upload host or prebuilt files to Google Storage.

    Args:
      upload_location: The upload location.
      acl: The canned acl used for uploading to Google Storage. acl can be one
        of: "public-read", "public-read-write", "authenticated-read",
        "bucket-owner-read", "bucket-owner-full-control", "project-private",
        or "private" (see "gsutil help acls"). If we are not uploading to
        Google Storage, this parameter is unused.
      binhost_base_url: The URL used for downloading the prebuilts.
      pkg_indexes: Old uploaded prebuilts to compare against. Instead of
        uploading duplicate files, we just link to the old files.
      build_path: The path to the directory containing the chroot.
      packages: Packages to upload.
      skip_upload: Don't actually upload the tarballs.
      binhost_conf_dir: Directory where to store binhost.conf files.
      debug: Don't push or upload prebuilts.
      target: BuildTarget managed by this builder.
      slave_targets: List of BuildTargets managed by slave builders.
    """
    self._upload_location = upload_location
    self._acl = acl
    self._binhost_base_url = binhost_base_url
    self._pkg_indexes = pkg_indexes
    self._build_path = build_path
    self._packages = set(packages)
    # Populated by _ShouldFilterPackage; used to warn about unmatched names.
    self._found_packages = set()
    self._skip_upload = skip_upload
    self._binhost_conf_dir = binhost_conf_dir
    self._debug = debug
    self._target = target
    self._slave_targets = slave_targets

  def _ShouldFilterPackage(self, pkg):
    """Return True if |pkg| is not in the requested --packages set."""
    if not self._packages:
      # No filter requested; keep everything.
      return False
    # Use portage from inside the chroot to split category/package.
    pym_path = os.path.abspath(os.path.join(self._build_path, _PYM_PATH))
    sys.path.insert(0, pym_path)
    # pylint: disable=F0401
    import portage.versions
    cat, pkgname = portage.versions.catpkgsplit(pkg['CPV'])[0:2]
    cp = '%s/%s' % (cat, pkgname)
    self._found_packages.add(cp)
    return pkgname not in self._packages and cp not in self._packages

  def _UploadPrebuilt(self, package_path, url_suffix):
    """Upload host or board prebuilt files to Google Storage space.

    Args:
      package_path: The path to the packages dir.
      url_suffix: The remote subdirectory where we should upload the packages.
    """
    # Process Packages file, removing duplicates and filtered packages.
    pkg_index = binpkg.GrabLocalPackageIndex(package_path)
    pkg_index.SetUploadLocation(self._binhost_base_url, url_suffix)
    pkg_index.RemoveFilteredPackages(self._ShouldFilterPackage)
    uploads = pkg_index.ResolveDuplicateUploads(self._pkg_indexes)
    unmatched_pkgs = self._packages - self._found_packages
    if unmatched_pkgs:
      cros_build_lib.Warning('unable to match packages: %r' % unmatched_pkgs)

    # Write Packages file.
    tmp_packages_file = pkg_index.WriteToNamedTemporaryFile()

    remote_location = '%s/%s' % (self._upload_location.rstrip('/'), url_suffix)
    assert remote_location.startswith('gs://')

    # Build list of files to upload.
    upload_files = GenerateUploadDict(package_path, remote_location, uploads)
    remote_file = '%s/Packages' % remote_location.rstrip('/')
    upload_files[tmp_packages_file.name] = remote_file

    RemoteUpload(self._acl, upload_files)

  def _UploadSdkTarball(self, board_path, url_suffix, version, prepackaged,
                        toolchain_tarballs, toolchain_upload_path):
    """Upload a tarball of the sdk at the specified path to Google Storage.

    Args:
      board_path: The path to the board dir.
      url_suffix: The remote subdirectory where we should upload the packages.
      version: The version of the board.
      prepackaged: If given, a tarball that has been packaged outside of this
        script and should be used.
      toolchain_tarballs: List of toolchain tarballs to upload.
      toolchain_upload_path: Path under the bucket to place toolchain tarballs.
    """
    remote_location = '%s/%s' % (self._upload_location.rstrip('/'), url_suffix)
    assert remote_location.startswith('gs://')
    boardname = os.path.basename(board_path.rstrip('/'))
    # We do not upload non SDK board tarballs,
    assert boardname == constants.CHROOT_BUILDER_BOARD
    assert prepackaged is not None

    # Strip the leading 'chroot-' the SDK builder prepends to versions.
    version_str = version[len('chroot-'):]
    remote_tarfile = toolchain.GetSdkURL(
        for_gsutil=True, suburl='cros-sdk-%s.tar.xz' % (version_str,))
    # For SDK, also upload the manifest which is guaranteed to exist
    # by the builderstage.
    _GsUpload(prepackaged + '.Manifest', remote_tarfile + '.Manifest',
              self._acl)
    _GsUpload(prepackaged, remote_tarfile, self._acl)

    # Post the toolchain tarballs too.
    for tarball in toolchain_tarballs:
      target, local_path = tarball.split(':')
      suburl = toolchain_upload_path % {'target': target}
      remote_path = toolchain.GetSdkURL(for_gsutil=True, suburl=suburl)
      _GsUpload(local_path, remote_path, self._acl)

    # Finally, also update the pointer to the latest SDK on which polling
    # scripts rely.
    with osutils.TempDir() as tmpdir:
      pointerfile = os.path.join(tmpdir, 'cros-sdk-latest.conf')
      remote_pointerfile = toolchain.GetSdkURL(for_gsutil=True,
                                               suburl='cros-sdk-latest.conf')
      osutils.WriteFile(pointerfile, 'LATEST_SDK="%s"' % version_str)
      _GsUpload(pointerfile, remote_pointerfile, self._acl)

  def _GetTargets(self):
    """Returns the list of targets to use."""
    targets = self._slave_targets[:]
    if self._target:
      targets.append(self._target)
    return targets

  def SyncHostPrebuilts(self, version, key, git_sync, sync_binhost_conf):
    """Synchronize host prebuilt files.

    This function will sync both the standard host packages, plus the host
    packages associated with all targets that have been "setup" with the
    current host's chroot. For instance, if this host has been used to build
    x86-generic, it will sync the host packages associated with
    'i686-pc-linux-gnu'. If this host has also been used to build arm-generic,
    it will also sync the host packages associated with
    'armv7a-cros-linux-gnueabi'.

    Args:
      version: A unique string, intended to be included in the upload path,
        which identifies the version number of the uploaded prebuilts.
      key: The variable key to update in the git file.
      git_sync: If set, update make.conf of target to reference the latest
        prebuilt packages generated here.
      sync_binhost_conf: If set, update binhost config file in
        chromiumos-overlay for the host.
    """
    # Slave boards are listed before the master board so that the master board
    # takes priority (i.e. x86-generic preflight host prebuilts takes priority
    # over preflight host prebuilts from other builders.)
    binhost_urls = []
    for target in self._GetTargets():
      url_suffix = _REL_HOST_PATH % {'version': version,
                                     'host_arch': _HOST_ARCH,
                                     'target': target}
      packages_url_suffix = '%s/packages' % url_suffix.rstrip('/')

      # Only the master target actually uploads; slaves just contribute URLs.
      if self._target == target and not self._skip_upload and not self._debug:
        # Upload prebuilts.
        package_path = os.path.join(self._build_path, _HOST_PACKAGES_PATH)
        self._UploadPrebuilt(package_path, packages_url_suffix)

      # Record URL where prebuilts were uploaded.
      binhost_urls.append('%s/%s/' % (self._binhost_base_url.rstrip('/'),
                                      packages_url_suffix.rstrip('/')))

    binhost = ' '.join(binhost_urls)
    if git_sync:
      git_file = os.path.join(self._build_path,
                              _PREBUILT_MAKE_CONF[_HOST_ARCH])
      RevGitFile(git_file, {key: binhost}, dryrun=self._debug)
    if sync_binhost_conf:
      binhost_conf = os.path.join(self._build_path, self._binhost_conf_dir,
                                  'host', '%s-%s.conf' % (_HOST_ARCH, key))
      UpdateBinhostConfFile(binhost_conf, key, binhost)

  def SyncBoardPrebuilts(self, version, key, git_sync, sync_binhost_conf,
                         upload_board_tarball, prepackaged_board,
                         toolchain_tarballs, toolchain_upload_path):
    """Synchronize board prebuilt files.

    Args:
      version: A unique string, intended to be included in the upload path,
        which identifies the version number of the uploaded prebuilts.
      key: The variable key to update in the git file.
      git_sync: If set, update make.conf of target to reference the latest
        prebuilt packages generated here.
      sync_binhost_conf: If set, update binhost config file in
        chromiumos-overlay for the current board.
      upload_board_tarball: Include a tarball of the board in our upload.
      prepackaged_board: A tarball of the board built outside of this script.
      toolchain_tarballs: A list of toolchain tarballs to upload.
      toolchain_upload_path: Path under the bucket to place toolchain tarballs.
    """
    for target in self._GetTargets():
      board_path = os.path.join(self._build_path,
                                _BOARD_PATH % {'board': target.board_variant})
      package_path = os.path.join(board_path, 'packages')
      url_suffix = _REL_BOARD_PATH % {'target': target, 'version': version}
      packages_url_suffix = '%s/packages' % url_suffix.rstrip('/')

      # Process the target board differently if it is the main --board.
      if self._target == target and not self._skip_upload and not self._debug:
        # This strips "chroot" prefix because that is sometimes added as the
        # --prepend-version argument (e.g. by chromiumos-sdk bot).
        # TODO(build): Clean it up to be less hard-coded.
        version_str = version[len('chroot-'):]

        # Upload board tarballs in the background.
        if upload_board_tarball:
          if toolchain_upload_path:
            toolchain_upload_path %= {'version': version_str}
          tar_process = multiprocessing.Process(
              target=self._UploadSdkTarball,
              args=(board_path, url_suffix, version, prepackaged_board,
                    toolchain_tarballs, toolchain_upload_path))
          tar_process.start()

        # Upload prebuilts.
        self._UploadPrebuilt(package_path, packages_url_suffix)

        # Make sure we finished uploading the board tarballs.
        if upload_board_tarball:
          tar_process.join()
          assert tar_process.exitcode == 0
          # TODO(zbehan): This should be done cleaner.
          if target.board == constants.CHROOT_BUILDER_BOARD:
            sdk_conf = os.path.join(self._build_path, self._binhost_conf_dir,
                                    'host/sdk_version.conf')
            sdk_settings = {
                'SDK_LATEST_VERSION': version_str,
                'TC_PATH': toolchain_upload_path,
            }
            RevGitFile(sdk_conf, sdk_settings, dryrun=self._debug)

      # Record URL where prebuilts were uploaded.
      url_value = '%s/%s/' % (self._binhost_base_url.rstrip('/'),
                              packages_url_suffix.rstrip('/'))

      if git_sync:
        git_file = DeterminePrebuiltConfFile(self._build_path, target)
        RevGitFile(git_file, {key: url_value}, dryrun=self._debug)

      if sync_binhost_conf:
        # Update the binhost configuration file in git.
        binhost_conf = os.path.join(self._build_path, self._binhost_conf_dir,
                                    'target', '%s-%s.conf' % (target, key))
        UpdateBinhostConfFile(binhost_conf, key, url_value)
def Usage(parser, msg):
  """Display usage message and parser help then exit with 1.

  Args:
    parser: The option parser whose help text should be shown.
    msg: Error message to print to stderr before the help text.
  """
  sys.stderr.write('%s\n' % msg)
  parser.print_help()
  sys.exit(1)
def _AddSlaveBoard(_option, _opt_str, value, parser):
  """Callback that adds a slave board to the list of slave targets."""
  parser.values.slave_targets.append(BuildTarget(value))
def _AddSlaveProfile(_option, _opt_str, value, parser):
  """Callback that adds a slave profile to the list of slave targets."""
  # A profile always attaches to the most recently added --slave-board.
  if not parser.values.slave_targets:
    Usage(parser, 'Must specify --slave-board before --slave-profile')
  if parser.values.slave_targets[-1].profile is not None:
    Usage(parser, 'Cannot specify --slave-profile twice for same board')
  parser.values.slave_targets[-1].profile = value
587 """Returns options given by the user and the target specified.
590 A tuple containing a parsed options object and BuildTarget.
591 The target instance is None if no board is specified.
593 parser = optparse.OptionParser()
594 parser.add_option('-H', '--binhost-base-url', dest='binhost_base_url',
595 default=_BINHOST_BASE_URL,
596 help='Base URL to use for binhost in make.conf updates')
597 parser.add_option('', '--previous-binhost-url', action='append',
598 default=[], dest='previous_binhost_url',
599 help='Previous binhost URL')
600 parser.add_option('-b', '--board', dest='board', default=None,
601 help='Board type that was built on this machine')
602 parser.add_option('-B', '--prepackaged-tarball', dest='prepackaged_tarball',
604 help='Board tarball prebuilt outside of this script.')
605 parser.add_option('--toolchain-tarball', dest='toolchain_tarballs',
606 action='append', default=[],
607 help='Redistributable toolchain tarball.')
608 parser.add_option('--toolchain-upload-path', default='',
609 help='Path to place toolchain tarballs in the sdk tree.')
610 parser.add_option('', '--profile', dest='profile', default=None,
611 help='Profile that was built on this machine')
612 parser.add_option('', '--slave-board', default=[], action='callback',
613 dest='slave_targets', type='string',
614 callback=_AddSlaveBoard,
615 help='Board type that was built on a slave machine. To '
616 'add a profile to this board, use --slave-profile.')
617 parser.add_option('', '--slave-profile', action='callback', type='string',
618 callback=_AddSlaveProfile,
619 help='Board profile that was built on a slave machine. '
620 'Applies to previous slave board.')
621 parser.add_option('-p', '--build-path', dest='build_path',
622 help='Path to the directory containing the chroot')
623 parser.add_option('', '--packages', action='append',
624 default=[], dest='packages',
625 help='Only include the specified packages. '
626 '(Default is to include all packages.)')
627 parser.add_option('-s', '--sync-host', dest='sync_host',
628 default=False, action='store_true',
629 help='Sync host prebuilts')
630 parser.add_option('-g', '--git-sync', dest='git_sync',
631 default=False, action='store_true',
632 help='Enable git version sync (This commits to a repo.) '
633 'This is used by full builders to commit directly '
634 'to board overlays.')
635 parser.add_option('-u', '--upload', dest='upload',
637 help='Upload location')
638 parser.add_option('-V', '--prepend-version', dest='prepend_version',
640 help='Add an identifier to the front of the version')
641 parser.add_option('-f', '--filters', dest='filters', action='store_true',
643 help='Turn on filtering of private ebuild packages')
644 parser.add_option('-k', '--key', dest='key',
645 default='PORTAGE_BINHOST',
646 help='Key to update in make.conf / binhost.conf')
647 parser.add_option('', '--set-version', dest='set_version',
649 help='Specify the version string')
650 parser.add_option('', '--sync-binhost-conf', dest='sync_binhost_conf',
651 default=False, action='store_true',
652 help='Update binhost.conf in chromiumos-overlay or '
653 'chromeos-overlay. Commit the changes, but don\'t '
654 'push them. This is used for preflight binhosts.')
655 parser.add_option('', '--binhost-conf-dir', dest='binhost_conf_dir',
656 default=_BINHOST_CONF_DIR,
657 help='Directory to commit binhost config with '
658 '--sync-binhost-conf.')
659 parser.add_option('-P', '--private', dest='private', action='store_true',
660 default=False, help='Mark gs:// uploads as private.')
661 parser.add_option('', '--skip-upload', dest='skip_upload',
662 action='store_true', default=False,
663 help='Skip upload step.')
664 parser.add_option('', '--upload-board-tarball', dest='upload_board_tarball',
665 action='store_true', default=False,
666 help='Upload board tarball to Google Storage.')
667 parser.add_option('', '--debug', dest='debug',
668 action='store_true', default=False,
669 help='Don\'t push or upload prebuilts.')
671 options, args = parser.parse_args()
672 if not options.build_path:
673 Usage(parser, 'Error: you need provide a chroot path')
674 if not options.upload and not options.skip_upload:
675 Usage(parser, 'Error: you need to provide an upload location using -u')
676 if not options.set_version and options.skip_upload:
677 Usage(parser, 'Error: If you are using --skip-upload, you must specify a '
678 'version number using --set-version.')
680 Usage(parser, 'Error: invalid arguments passed to upload_prebuilts: '
685 target = BuildTarget(options.board, options.profile)
687 if target in options.slave_targets:
688 Usage(parser, 'Error: --board/--profile must not also be a slave target.')
690 if len(set(options.slave_targets)) != len(options.slave_targets):
691 Usage(parser, 'Error: --slave-boards must not have duplicates.')
693 if options.slave_targets and options.git_sync:
694 Usage(parser, 'Error: --slave-boards is not compatible with --git-sync')
696 if (options.upload_board_tarball and options.skip_upload and
697 options.board == 'amd64-host'):
698 Usage(parser, 'Error: --skip-upload is not compatible with '
699 '--upload-board-tarball and --board=amd64-host')
701 if (options.upload_board_tarball and not options.skip_upload and
702 not options.upload.startswith('gs://')):
703 Usage(parser, 'Error: --upload-board-tarball only works with gs:// URLs.\n'
704 '--upload must be a gs:// URL.')
706 if options.upload_board_tarball and options.prepackaged_tarball is None:
707 Usage(parser, 'Error: --upload-board-tarball requires '
708 '--prepackaged-tarball.')
711 if options.sync_host:
712 Usage(parser, 'Error: --private and --sync-host/-s cannot be specified '
713 'together, we do not support private host prebuilts')
715 if not options.upload or not options.upload.startswith('gs://'):
716 Usage(parser, 'Error: --private is only valid for gs:// URLs.\n'
717 '--upload must be a gs:// URL.')
719 if options.binhost_base_url != _BINHOST_BASE_URL:
720 Usage(parser, 'Error: when using --private the --binhost-base-url '
721 'is automatically derived.')
723 return options, target
def main(_argv=None):
  """Entry point: parse options, then sync host and/or board prebuilts.

  Args:
    _argv: Unused; options are read from sys.argv by ParseOptions().
  """
  # Set umask to a sane value so that files created as root are readable.
  os.umask(0o22)

  options, target = ParseOptions()

  # Calculate a list of Packages index files to compare against. Whenever we
  # upload a package, we check to make sure it's not already stored in one of
  # the packages files we uploaded. This list of packages files might contain
  # both board and host packages.
  pkg_indexes = _GrabAllRemotePackageIndexes(options.previous_binhost_url)

  if options.set_version:
    version = options.set_version
  else:
    version = GetVersion()

  if options.prepend_version:
    version = '%s-%s' % (options.prepend_version, version)

  acl = 'public-read'
  binhost_base_url = options.binhost_base_url

  # Private uploads pull their ACL from the board overlay and point the
  # binhost at the (private) upload bucket itself.
  if target and options.private:
    binhost_base_url = options.upload
    if not options.skip_upload:
      board_path = GetBoardOverlay(options.build_path, target)
      acl = os.path.join(board_path, _GOOGLESTORAGE_ACL_FILE)

  uploader = PrebuiltUploader(options.upload, acl, binhost_base_url,
                              pkg_indexes, options.build_path,
                              options.packages, options.skip_upload,
                              options.binhost_conf_dir, options.debug,
                              target, options.slave_targets)

  if options.sync_host:
    uploader.SyncHostPrebuilts(version, options.key, options.git_sync,
                               options.sync_binhost_conf)

  if options.board or options.slave_targets:
    uploader.SyncBoardPrebuilts(version, options.key, options.git_sync,
                                options.sync_binhost_conf,
                                options.upload_board_tarball,
                                options.prepackaged_tarball,
                                options.toolchain_tarballs,
                                options.toolchain_upload_path)