2 # Copyright (c) 2014 The Native Client Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 """This script handles all of the processing for versioning packages.
8 package_version.py manages all of the various operations done between
9 packages, including archiving, extracting, uploading, and downloading
10 packages. For a list of options and commands, see the help for the script.
13 Package: A list of archives, such as "nacl_x86_glibc" or "nacl_x86_newlib".
14 Package Archive: An archive (usually a tar file) that is part of a package.
15 Package Target: Package targets consists of packages. Each package target
16 has its own version of a package. An example of a package target would
17 be something such as "win_x86_nacl_x86" or "mac_x86_nacl_x86". In that case,
18 "win_x86_nacl_x86" and "mac_x86_nacl_x86" would each have their own version
19 of "nacl_x86_glibc" and "nacl_x86_newlib" for windows and mac respectively.
20 Revision Number: The SVN revision number of a sanctioned version. This number
21 is used to synchronize packages to sanctioned versions.
24 Packages File - A file which describes the various package targets for each
25 platform/architecture along with the packages associated with each package
27 [Default file: build/package_version/standard_packages.json].
28 Package File - A file which contains the list of package archives within
30 [Default file: toolchain/.tars/$PACKAGE_TARGET/$PACKAGE.json]
31 Archive File - A file which describes an archive within a package. Each
32 archive description file will contain information about an archive such
33 as name, URL to download from, and hash.
34 [Default File: toolchain/.tars/$PACKAGE_TARGET/$PACKAGE/$ARCHIVE.json]
35 Revision File - A file which describes the sanctioned version of package
36 for each of the package targets associated with it.
37 [Default file: toolchain_revisions/$PACKAGE.json]
47 sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
50 sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
51 import pynacl.file_tools
52 import pynacl.gsd_storage
53 import pynacl.log_tools
54 import pynacl.platform
55 import pynacl.working_directory
59 import package_locations
63 CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
64 BUILD_DIR = os.path.dirname(CURRENT_DIR)
65 NACL_DIR = os.path.dirname(BUILD_DIR)
69 DEFAULT_PACKAGES_JSON = os.path.join(CURRENT_DIR, 'standard_packages.json')
70 DEFAULT_REVISIONS_DIR = os.path.join(NACL_DIR, 'toolchain_revisions')
71 DEFAULT_DEST_DIR = os.path.join(NACL_DIR, 'toolchain')
72 DEFAULT_CLOUD_BUCKET = 'nativeclient-archive2'
def RemoveOldToolchainFiles(toolchain_dir):
  """Temporary code to delete old toolchain files.

  Removes legacy layout artifacts from a toolchain directory: loose tar
  files under ".tars", top-level directories not matching the $OS_$ARCH
  naming scheme, and second-level directories not matching the known
  toolchain naming patterns.

  NOTE(review): this listing is elided — several statements (early return,
  os.unlink calls, a list initialization, "continue"/"else" lines) are
  missing; "[elided: ...]" markers below flag each gap. Confirm against the
  upstream file before relying on this text.

  Args:
    toolchain_dir: Root toolchain directory to clean.
  """
  if not os.path.isdir(toolchain_dir):
    # [elided: presumably a bare "return" — nothing to clean]

  # Delete old tar files
  tars_dir = os.path.join(toolchain_dir, '.tars')
  if os.path.isdir(tars_dir):
    for tar_item in os.listdir(tars_dir):
      tar_path = os.path.join(tars_dir, tar_item)
      if os.path.isfile(tar_path):
        print 'Removing stale tar file:', tar_path
        # [elided: presumably "os.unlink(tar_path)"]

  # Delete any top level directories that do not conform to $OS_$ARCH.
  # Build the set of valid "<os>_<arch>" names from the known OS/arch lists.
  valid_top_level_dirs = set()
  for os_name in pynacl.platform.OS_LIST:
    for arch_name in pynacl.platform.ARCH_LIST:
      valid_top_level_dirs.add('%s_%s' % (os_name, arch_name))
  # [elided: presumably "top_level_dirs = []" — appended to below]

  # Delete any files and directories that do not conform to the standard.
  # Do not touch any system files that begin with '.', including ".tars".
  for dir_item in os.listdir(toolchain_dir):
    if dir_item.startswith('.'):
      # [elided: presumably "continue"]
    full_path = os.path.join(toolchain_dir, dir_item)
    if dir_item in valid_top_level_dirs:
      # Valid $OS_$ARCH directory: keep it and scan its children below.
      top_level_dirs.append(full_path)
      # [elided: presumably "continue"]
    print 'Removing stale toolchain item:', full_path
    if os.path.isfile(full_path):
      # [elided: presumably "os.unlink(full_path)" plus an "else:" branch
      # so that the directory-removal call below only runs for directories]
      pynacl.file_tools.RemoveDir(full_path)

  # Delete any second level dirs that do not conform one of the following:
  # 1. It must be of the format "nacl_*".
  # 2. It must be of the format "pnacl_*".
  # 3. It must be of the format "*_trusted".
  for top_level_dir in top_level_dirs:
    for dir_item in os.listdir(top_level_dir):
      if (dir_item.startswith('nacl_') or
          dir_item.startswith('pnacl_') or
          dir_item.endswith('_trusted')):
        # [elided: presumably "continue" — names matching these patterns
        # are kept]
      full_path = os.path.join(top_level_dir, dir_item)
      print 'Removing stale toolchain item:', full_path
      if os.path.isfile(full_path):
        # [elided: presumably "os.unlink(full_path)" plus an "else:" branch]
        pynacl.file_tools.RemoveDir(full_path)
131 # These are helper functions that help each command.
def CleanTempFiles(directory):
  """Cleans up all temporary files ending with TEMP_SUFFIX in a directory.

  Walks the tree rooted at |directory| and deletes every file whose name
  ends with TEMP_SUFFIX (partial downloads and other scratch files left
  behind by interrupted runs).

  Args:
    directory: Root directory to scrub recursively.
  """
  for root, dirs, files in os.walk(directory):
    for file_name in files:
      if file_name.endswith(TEMP_SUFFIX):
        file_path = os.path.join(root, file_name)
        # Restored: the deletion call was dropped from this listing, leaving
        # the matched path computed but never removed.
        os.unlink(file_path)
def GetPackageTargetPackages(custom_package_name, package_target_packages):
  """Returns a list of package target packages given a custom package name.

  A custom package name can either have a specified package target attached
  to it (IE. $PACKAGE_TARGET/PACKAGE_NAME) or be extracted out of a default
  list of package targets along with their packages.

  Args:
    custom_package_name: Package name with an optional package target.
    package_target_packages: List of tuples (package_target, package).
  Returns:
    List of package target tuples matching the package name.
  """
  # Normalize both separator spellings to the native one so the split below
  # works regardless of how the caller wrote the name.
  package_path = custom_package_name.replace('\\', os.path.sep)
  package_path = package_path.replace('/', os.path.sep)
  if os.path.sep in package_path:
    # Package target is part of the custom package name, just return it.
    package_target, package_name = package_path.split(os.path.sep, 1)
    return [(package_target, package_name)]

  # Package target is not part of the package name, filter from list of passed
  # in package targets. (Restored: the "return [" opener and closing "]" of
  # this comprehension were dropped from the listing.)
  return [
      (package_target, package)
      for package_target, package in package_target_packages
      if package == custom_package_name
  ]
def DownloadPackageArchives(tar_dir, package_target, package_name, package_desc,
                            downloader=None, revision_num=None):
  """Downloads package archives from the cloud to the tar directory.

  NOTE(review): elided listing — argument lists of several calls and a few
  statements (initializations, try/except headers, a "continue") are
  missing; "[elided: ...]" markers flag each gap.

  Args:
    tar_dir: Root tar directory where archives will be downloaded to.
    package_target: Package target of the package to download.
    package_name: Package name of the package to download.
    package_desc: package_info object of the package to download.
    downloader: function which takes a url and a file path for downloading.
    revision_num: revision identifier — in the visible code it is only used
      in the "--Syncing ...--" log line.
  Returns:
    The list of files that were downloaded.
  """
  downloaded_files = []
  if downloader is None:
    # Default to plain HTTP download when the caller supplies nothing.
    downloader = pynacl.gsd_storage.HttpDownload

  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
      # [elided: remaining arguments, presumably package_target/package_name]

  # To ensure that we do not redownload extra archives that we already have,
  # create a dictionary of old package archives that contains the hash of each
  # [elided: rest of comment and the "old_archives = {}" initialization]
  if os.path.isfile(local_package_file):
    # [elided: "try:" — the orphaned handler body further below wipes
    # everything when the local package file cannot be parsed]
    old_package_desc = package_info.PackageInfo(local_package_file)
    old_archives_list = old_package_desc.GetArchiveList()
    old_archive_names = [archive.GetArchiveData().name
                         # [elided: "for archive"]
                         in old_archives_list]
    for archive_name in old_archive_names:
      archive_file = package_locations.GetLocalPackageArchiveFile(
          # [elided: arguments]
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash is not None:
        # Remember the local hash so matching archives can be skipped below.
        old_archives[archive_name] = archive_hash
    # [elided: "except:" handler header]
      # Nothing can be trusted here anymore, delete all package archives.
      archive_directory = package_locations.GetLocalPackageArchiveDir(
          # [elided: arguments]
      os.unlink(local_package_file)
      pynacl.file_tools.RemoveDir(archive_directory)

  # Download packages information file along with each of the package
  # archives described in the information file. Also keep track of what
  # new package names matches old package names. We will have to delete
  # stale package names after we are finished.
  # [elided: presumably "update_archives = []" — appended to below]
  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    old_hash = old_archives.get(archive_desc.name, None)
    if old_hash is not None:
      old_archives.pop(archive_desc.name)
      if archive_desc.hash == old_hash:
        # Identical archive already present locally; nothing to download.
        logging.debug('Skipping matching archive: %s', archive_desc.name)
        # [elided: presumably "continue"]
    update_archives.append(archive_obj)

  # [elided: presumably an "if update_archives:" guard for the log line]
  logging.info('--Syncing %s to revision %s--' % (package_name, revision_num))
  num_archives = len(update_archives)
  for index, archive_obj in enumerate(update_archives):
    archive_desc = archive_obj.GetArchiveData()
    local_archive_file = package_locations.GetLocalPackageArchiveFile(
        # [elided: arguments]
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)

    if archive_desc.url is None:
      raise IOError('Error, no URL for archive: %s' % archive_desc.name)

    logging.info('Downloading package archive: %s (%d/%d)' %
                 (archive_desc.name, index+1, num_archives))
    # [elided: "try:" header for the download below]
    downloader(archive_desc.url, local_archive_file)
    except Exception as e:
      # Wrap any downloader failure into IOError carrying the offending URL.
      raise IOError('Could not download URL (%s): %s' %
                    (archive_desc.url, e))

    # Verify the download against the hash recorded in the package file.
    verified_hash = archive_info.GetArchiveHash(local_archive_file)
    if verified_hash != archive_desc.hash:
      raise IOError('Package hash check failed: %s != %s' %
                    (verified_hash, archive_desc.hash))

    downloaded_files.append(local_archive_file)

  # Delete any stale left over packages.
  for old_archive in old_archives:
    archive_file = package_locations.GetLocalPackageArchiveFile(
        # [elided: arguments]
    os.unlink(archive_file)

  # Save the package file so we know what we currently have.
  package_desc.SavePackageFile(local_package_file)

  return downloaded_files
def ArchivePackageArchives(tar_dir, package_target, package_name, archives,
                           # [elided: second signature line, presumably
                           # "extra_archives=...):" — the parameter is used
                           # in the body below]
  """Archives local package archives to the tar directory.

  NOTE(review): elided listing — several argument lists and statements
  (guards, "else:" branches, an "archive_list" initialization, an early
  return) are missing; "[elided: ...]" markers flag each gap.

  Args:
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    archives: List of archive file paths where archives currently live.
    extra_archives: Extra archives that are expected to be built elsewhere.
  Returns:
    Returns the local package file that was archived.
  """
  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
      # [elided: arguments]

  valid_archive_files = set()
  # [elided: presumably "archive_list = []" — appended to below]
  package_desc = package_info.PackageInfo()
  # Pair each archive path with a skip_missing flag: regular archives must
  # exist, extra archives may legitimately be absent.
  package_archives = ([(archive, False) for archive in archives] +
                      [(archive, True) for archive in extra_archives])
  for archive, skip_missing in package_archives:
    # [elided: defaults for url/extract params and an "if '@' in archive:"
    # guard before this split]
    archive, archive_url = archive.split('@', 1)
    # [elided: an "if ',' in archive:" guard before this split]
    archive, extract_param = archive.split(',', 1)
    if ':' in extract_param:
      # "SRCDIR:EXTRACTDIR" form.
      tar_src_dir, extract_dir = extract_param.split(':', 1)
    # [elided: "else:"]
      tar_src_dir = extract_param

    archive_hash = archive_info.GetArchiveHash(archive)
    archive_name = os.path.basename(archive)
    archive_desc = archive_info.ArchiveInfo(archive_name,
                                            # [elided: hash/url arguments]
                                            tar_src_dir=tar_src_dir,
                                            extract_dir=extract_dir)
    package_desc.AppendArchive(archive_desc)

    if archive_hash is None:
      # [elided: "if skip_missing:" guard]
      logging.info('Skipping archival of missing file: %s', archive)
      # [elided: "continue" / "else:" before the raise]
      raise IOError('Invalid package: %s.' % archive)
    archive_list.append(archive)

    archive_basename = os.path.basename(archive)
    archive_json = archive_basename + '.json'
    valid_archive_files.update([archive_basename, archive_json])

  # Delete any stale archive files
  local_archive_dir = package_locations.GetLocalPackageArchiveDir(
      # [elided: arguments]
  if os.path.isdir(local_archive_dir):
    for dir_item in os.listdir(local_archive_dir):
      if dir_item in valid_archive_files:
        # [elided: presumably "continue" — keep known-good files]
      item_path = os.path.join(local_archive_dir, dir_item)
      if os.path.isdir(item_path):
        pynacl.file_tools.RemoveDir(item_path)
      # [elided: "else:"]
        pynacl.file_tools.RemoveFile(item_path)

  # We do not need to archive the package if it already matches. But if the
  # local package file is invalid or does not match, then we should recreate
  # [elided: rest of comment]
  if os.path.isfile(local_package_file):
    # [elided: "try:"]
    current_package_desc = package_info.PackageInfo(local_package_file,
        # [elided: arguments]
    if current_package_desc == package_desc:
      # [elided: early "return local_package_file" and the except handler]

  # Copy each of the packages over to the tar directory first.
  for archive_file in archive_list:
    archive_name = os.path.basename(archive_file)
    local_archive_file = package_locations.GetLocalPackageArchiveFile(
        # [elided: arguments]
    logging.info('Archiving file: %s', archive_file)
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)
    shutil.copyfile(archive_file, local_archive_file)

  # Once all the copying is completed, update the local packages file.
  logging.info('Package "%s" archived: %s', package_name, local_package_file)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(local_package_file)
  package_desc.SavePackageFile(local_package_file)

  return local_package_file
def UploadPackage(storage, revision, tar_dir, package_target, package_name,
                  is_shared_package, annotate=False, skip_missing=False,
                  custom_package_file=None):
  """Uploads a local package file to the supplied cloud storage object.

  By default local package files are expected to be found in the standardized
  location within the tar directory, however a custom package file may be
  specified to upload from a different location. Package archives that do not
  have their URL field set will automatically have the archives uploaded so
  that someone accessing the package file from the cloud storage will also
  have access to the package archives.

  NOTE(review): elided listing — several argument lists, "else:"/"if
  annotate:" headers and one "raise" opener are missing; "[elided: ...]"
  markers flag each gap.

  Args:
    storage: Cloud storage object which supports PutFile and GetFile.
    revision: SVN Revision number the package should be associated with.
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    is_shared_package: Is this package shared among all package targets?
    annotate: Print annotations for build bots?
    skip_missing: Skip missing package archive files?
    custom_package_file: File location for a custom package file.
  Returns:
    Returns remote download key for the uploaded package file.
  """
  if custom_package_file is not None:
    local_package_file = custom_package_file
  # [elided: "else:"]
    local_package_file = package_locations.GetLocalPackageFile(
        # [elided: arguments]

  # Upload the package file and also upload any local package archives so
  # that they are downloadable.
  package_desc = package_info.PackageInfo(local_package_file,
                                          skip_missing=skip_missing)
  upload_package_desc = package_info.PackageInfo()

  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    url = archive_desc.url
    if archive_desc.hash and url is None:
      # [elided: presumably "if annotate:" guard for the build-bot line]
      print '@@@BUILD_STEP Archive:%s (upload)@@@' % archive_desc.name

      archive_file = package_locations.GetLocalPackageArchiveFile(
          # [elided: arguments]
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash is None:
        raise IOError('Missing Archive File: %s' % archive_file)
      elif archive_hash != archive_desc.hash:
        # [elided: "raise IOError(" opener for the message below]
            'Archive hash does not match package hash: %s' % archive_file
            + '\n Archive Hash: %s' % archive_hash
            + '\n Package Hash: %s' % archive_desc.hash)

      # NOTE(review): "publically" below is a typo but lives in a runtime
      # log message, so it is left untouched here.
      logging.warn('Missing archive URL: %s', archive_desc.name)
      logging.warn('Uploading archive to be publically available...')
      remote_archive_key = package_locations.GetRemotePackageArchiveKey(
          # [elided: arguments]
      url = storage.PutFile(archive_file, remote_archive_key, clobber=True)
      # [elided: presumably "if annotate:" guard]
      print '@@@STEP_LINK@download@%s@@@' % url

    # Rebuild the archive description with the (possibly new) URL for the
    # uploaded package description.
    archive_desc = archive_info.ArchiveInfo(
        # [elided: name/hash/url arguments]
        tar_src_dir=archive_desc.tar_src_dir,
        extract_dir=archive_desc.extract_dir)
    upload_package_desc.AppendArchive(archive_desc)

  upload_package_file = local_package_file + '.upload'
  pynacl.file_tools.MakeParentDirectoryIfAbsent(upload_package_file)
  upload_package_desc.SavePackageFile(upload_package_file)

  logging.info('Uploading package information: %s', package_name)
  remote_package_key = package_locations.GetRemotePackageKey(
      # [elided: arguments]
  package_info.UploadPackageInfoFiles(storage, package_target, package_name,
                                      remote_package_key, upload_package_file,
                                      skip_missing=skip_missing,
                                      # [elided: trailing arguments]

  return remote_package_key
def ExtractPackageTargets(package_target_packages, tar_dir, dest_dir,
                          downloader=None, skip_missing=False, quiet=False):
  """Extracts package targets from the tar directory to the destination.

  Each package archive within a package will be verified before being
  extracted. If a package archive does not exist or does not match the hash
  stored within the package file, it will be re-downloaded before being
  extracted.

  NOTE(review): elided listing — several argument lists, try/except
  headers, guards and the tar extraction block are missing;
  "[elided: ...]" markers flag each gap.

  Args:
    package_target_packages: List of tuples of package target and package names.
    tar_dir: Source tar directory where package archives live.
    dest_dir: Root destination directory where packages will be extracted to.
    downloader: function which takes a url and a file path for downloading.
    skip_missing: tolerate archives whose local files are absent.
    quiet: when True, suppress per-file tar output during extraction.
  """
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload

  for package_target, package_name in package_target_packages:
    package_file = package_locations.GetLocalPackageFile(tar_dir,
        # [elided: arguments]
    package_desc = package_info.PackageInfo(package_file,
                                            skip_missing=skip_missing)
    dest_package_dir = package_locations.GetFullDestDir(dest_dir,
        # [elided: arguments]
    dest_package_file = package_locations.GetDestPackageFile(dest_dir,
        # [elided: arguments]

    # Only do the extraction if the extract packages do not match.
    if os.path.isfile(dest_package_file):
      # [elided: "try:" header]
      dest_package_desc = package_info.PackageInfo(dest_package_file)
      if dest_package_desc == package_desc:
        logging.debug('Skipping extraction for package (%s)', package_name)
        # [elided: presumably "continue" and the except handler header]
      # Destination package file cannot be trusted, if invalid re-extract.

      # Delete the old package file before we extract.
      os.unlink(dest_package_file)

    if os.path.isdir(dest_package_dir):
      logging.debug('Deleting old package directory: %s', dest_package_dir)
      pynacl.file_tools.RemoveDir(dest_package_dir)

    logging.info('Extracting package (%s) to directory: %s',
                 package_name, dest_package_dir)
    archive_list = package_desc.GetArchiveList()
    num_archives = len(archive_list)
    for index, archive_obj in enumerate(archive_list):
      archive_desc = archive_obj.GetArchiveData()
      archive_file = package_locations.GetLocalPackageArchiveFile(
          # [elided: arguments]

      # Upon extraction, some files may not be downloaded (or have stale files),
      # we need to check the hash of each file and attempt to download it if
      # [elided: rest of comment]
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash != archive_desc.hash:
        if archive_desc.url is None:
          # [elided: presumably an "if skip_missing:" guard]
          logging.info('Skipping extraction of missing archive: %s' %
              # [elided: argument, presumably followed by "continue"]
          raise IOError('Invalid archive file and URL: %s' % archive_file)

        logging.warn('Expected archive missing, downloading: %s',
            # [elided: argument]

        pynacl.file_tools.MakeParentDirectoryIfAbsent(archive_file)
        downloader(archive_desc.url, archive_file)
        # Re-verify after re-downloading; a mismatch now is fatal.
        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash != archive_desc.hash:
          raise IOError('Downloaded archive file does not match hash.'
                        ' [%s] Expected %s, received %s.' %
                        (archive_file, archive_desc.hash, archive_hash))

      destination_dir = os.path.join(dest_package_dir, archive_desc.extract_dir)
      logging.info('Extracting %s (%d/%d)' %
                   (archive_desc.name, index+1, num_archives))

      # Extract into a temp dir first, then merge into the destination.
      temp_dir = os.path.join(destination_dir, '.tmp')
      pynacl.file_tools.RemoveDir(temp_dir)
      os.makedirs(temp_dir)
      tar_output = not quiet
      tar = cygtar.CygTar(archive_file, 'r:*', verbose=tar_output)
      # [elided: the chdir/Extract/close block that unpacks the tar into
      # temp_dir]
      temp_src_dir = os.path.join(temp_dir, archive_desc.tar_src_dir)
      pynacl.file_tools.MoveAndMergeDirTree(temp_src_dir, destination_dir)
      pynacl.file_tools.RemoveDir(temp_dir)

    # Record what was extracted so future runs can skip matching packages.
    pynacl.file_tools.MakeParentDirectoryIfAbsent(dest_package_file)
    package_desc.SavePackageFile(dest_package_file)
598 # Each Command has 2 functions that describes it:
599 # 1. A parser function which specifies the extra command options each command
601 # 2. An execution function which is called when a user actually executes
604 def _ListCmdArgParser(subparser):
605 subparser.description = 'Lists package information.'
def _DoListCmd(arguments):
  """Executes the 'list' command: prints packages grouped by package target.

  NOTE(review): elided listing — the innermost print line (presumably
  printing each package name) is missing.
  """
  # Group the (package_target, package) pairs by target for display.
  package_targets = collections.defaultdict(list)
  for package_target, package in arguments.package_target_packages:
    package_targets[package_target].append(package)

  print 'Listing Package Targets and Packages:'
  for package_target, packages in package_targets.iteritems():
    print '\n%s:' % package_target
    for package in sorted(packages):
      # [elided: presumably a print of each package name]
def _ArchiveCmdArgParser(subparser):
  """Registers command line arguments for the 'archive' command.

  NOTE(review): elided listing — a few argument lines (e.g. a
  required/nargs setting and the tail of one help string) are missing.
  """
  subparser.description = 'Archive package archives to tar directory.'
  subparser.add_argument(
    '--archive-package', metavar='NAME', dest='archive__package',
    # [elided: presumably "required=True,"]
    help='Package name archives will be packaged into.')
  subparser.add_argument(
    '--extra-archive', metavar='ARCHIVE', dest='archive__extra_archive',
    action='append', default=[],
    help='Extra archives that are expected to be built elsewhere.')
  # Positional archive arguments.
  subparser.add_argument(
    metavar='TAR(,SRCDIR(:EXTRACTDIR))(@URL)', dest='archive__archives',
    # [elided: presumably an "nargs"/"action" setting]
    help='Package archive with an optional tar information and url.'
         ' SRCDIR is the root directory where files live inside of the tar.'
         ' EXTRACTDIR is the directory to extract files to relative to the'
         ' destination directory. The URL is where the package can be'
         # [elided: closing fragment of this help string]
  subparser.add_argument(
    '-x', '--extract', dest='archive__extract',
    action='store_true', default=False,
    help='Extract package archives after they have been archived.')
def _DoArchiveCmd(arguments):
  """Executes the 'archive' command.

  NOTE(review): elided listing — a closing parenthesis and several call
  arguments are missing.
  """
  package_target_packages = GetPackageTargetPackages(
      arguments.archive__package,
      arguments.package_target_packages
  # [elided: closing ")"]
  if not package_target_packages:
    raise NameError('Unknown package: %s.' % arguments.archive__package
                    + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in package_target_packages:
    ArchivePackageArchives(arguments.tar_dir,
                           # [elided: package_target/package_name arguments]
                           arguments.archive__archives,
                           extra_archives=arguments.archive__extra_archive)

    # Optionally extract immediately after archiving.
    if arguments.archive__extract:
      ExtractPackageTargets([(package_target, package_name)],
                            # [elided: tar_dir/dest_dir/skip_missing args]
                            quiet=arguments.quiet)
668 def _ExtractCmdArgParser(subparser):
669 subparser.description = 'Extract packages from tar directory.'
def _DoExtractCmd(arguments):
  """Executes the 'extract' command: extracts all packages to the dest dir.

  NOTE(review): elided listing — the tar/dest directory arguments of the
  call are missing.
  """
  ExtractPackageTargets(
      arguments.package_target_packages,
      # [elided: presumably arguments.tar_dir, arguments.dest_dir, ...]
      quiet=arguments.quiet)
def _UploadCmdArgParser(subparser):
  """Registers command line arguments for the 'upload' command.

  NOTE(review): elided listing — one argument line (likely a default for
  --package-file) is missing.
  """
  subparser.description = 'Upload a package file.'
  subparser.add_argument(
    '--upload-package', metavar='NAME', dest='upload__package', required=True,
    help='Package to upload.')
  subparser.add_argument(
    '--revision', metavar='NUM', dest='upload__revision', required=True,
    help='SVN Revision of the package to upload.')
  subparser.add_argument(
    '--package-file', metavar='FILE', dest='upload__file',
    # [elided: presumably "default=None,"]
    help='Use custom package file instead of standard package file found'
         ' in the tar directory.')
  subparser.add_argument(
    '--skip-missing', dest='upload__skip_missing',
    action='store_true', default=False,
    help='Skip missing archive files when uploading package archives.')
def _DoUploadCmd(arguments):
  """Executes the 'upload' command.

  NOTE(review): elided listing — the UploadPackage call is missing its
  opening line and several arguments.
  """
  package_target_packages = GetPackageTargetPackages(
      arguments.upload__package,
      arguments.package_target_packages
  # [elided: closing ")"]
  if not package_target_packages:
    raise NameError('Unknown package: %s.' % arguments.upload__package
                    + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in package_target_packages:
    # [elided: "UploadPackage(" opener plus storage/tar_dir/target args]
        arguments.upload__revision,
        # [elided: more positional arguments]
        arguments.packages_desc.IsSharedPackage(package_name),
        annotate=arguments.annotate,
        skip_missing=arguments.upload__skip_missing,
        custom_package_file=arguments.upload__file
    # [elided: closing ")"]
722 def _SyncCmdArgParser(subparser):
723 subparser.description = 'Download package archives to the tar directory.'
724 subparser.add_argument(
725 '--revision', metavar='NUM', dest='sync__revision',
727 help='SVN Revision of the packages to download.')
728 subparser.add_argument(
729 '-x', '--extract', dest='sync__extract',
730 action='store_true', default=False,
731 help='Extract package arcvhies after they have been downloaded.')
def _DoSyncCmd(arguments):
  """Executes the 'sync' command: downloads package archives locally.

  NOTE(review): elided listing — several call arguments and an "else:"
  branch header are missing; "[elided: ...]" markers flag each gap.
  """
  # TODO(dyen): remove this section eventually.
  # Before syncing, remove any old toolchain files temporarily.
  RemoveOldToolchainFiles(arguments.dest_dir)

  for package_target, package_name in arguments.package_target_packages:
    if arguments.sync__revision is None:
      # When the sync revision number is not specified, use the set
      # revision number found in the revision directory.
      revision_file = package_locations.GetRevisionFile(
          arguments.revisions_dir,
          # [elided: package name argument]
      revision_desc = revision_info.RevisionInfo(
          arguments.packages_desc,
          # [elided: revision_file argument]
      package_desc = revision_desc.GetPackageInfo(package_target)
      revision_num = revision_desc.GetRevisionNumber()
    # [elided: "else:"]
      # When the sync revision number is specified, find the package to
      # download remotely using the revision.
      revision_num = arguments.sync__revision
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
          arguments.sync__revision,
          # [elided: package_target/package_name arguments]
      # Fetch the remote package info into a temp working directory.
      with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
        temp_package_file = os.path.join(
            # [elided: work_dir argument]
            os.path.basename(remote_package_key) + TEMP_SUFFIX)

        package_info.DownloadPackageInfoFiles(
            # [elided: temp_package_file/remote_package_key arguments]
            downloader=arguments.gsd_store.GetFile)

        package_desc = package_info.PackageInfo(temp_package_file)

    DownloadPackageArchives(
        # [elided: tar_dir/package_target/package_name/package_desc args]
        revision_num=revision_num)

  # Drop any *.tmp leftovers from interrupted downloads.
  CleanTempFiles(arguments.tar_dir)

  if arguments.sync__extract:
    ExtractPackageTargets(
        arguments.package_target_packages,
        # [elided: tar_dir/dest_dir arguments]
        quiet=arguments.quiet)
def _SetRevisionCmdArgParser(subparser):
  """Registers command line arguments for the 'setrevision' command.

  NOTE(review): elided listing — one argument line (likely "required=True,")
  is missing for --revision-package.
  """
  subparser.description = 'Specify the revision of a package.'
  subparser.add_argument(
    '--revision-package', metavar='NAME', dest='setrevision__package',
    # [elided: presumably "required=True,"]
    help='Package name to set revision of.')
  subparser.add_argument(
    '--revision', metavar='NUM', dest='setrevision__revision',
    type=int, required=True,
    help='SVN Revision of the package to set.')
def _DoSetRevisionCmd(arguments):
  """Executes the 'setrevision' command: pins a package to a revision.

  Downloads the package info for each relevant package target at the given
  revision, records it into a revision description, and saves the revision
  file under revisions_dir.

  NOTE(review): elided listing — several call arguments and an "else:"
  branch header are missing; "[elided: ...]" markers flag each gap.
  """
  package_name = arguments.setrevision__package
  revision_num = arguments.setrevision__revision

  revision_desc = revision_info.RevisionInfo(arguments.packages_desc)
  revision_desc.SetRevisionNumber(revision_num)

  custom_package_targets = GetPackageTargetPackages(package_name, [])
  if not custom_package_targets:
    # Bare package name: look up every target that carries this package.
    package_targets = arguments.packages_desc.GetPackageTargetsForPackage(
        # [elided: package_name argument, closing ")", and "else:" header]
    package_targets = [target[0] for target in custom_package_targets]
    first_target = custom_package_targets[0]
    package_name = first_target[1]

  with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
    for package_target in package_targets:
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
          # [elided: revision_num/package_target/package_name arguments]
      temp_package_file = os.path.join(
          # [elided: work_dir argument]
          os.path.basename(remote_package_key) + TEMP_SUFFIX)

      package_info.DownloadPackageInfoFiles(
          # [elided: temp_package_file/remote_package_key arguments]
          downloader=arguments.gsd_store.GetFile)

      package_desc = package_info.PackageInfo(temp_package_file)

      logging.info('Setting %s:%s to revision %s',
                   package_target, package_name, revision_num)
      revision_desc.SetTargetRevision(
          # [elided: package_name/package_target/package_desc arguments]

  revision_file = package_locations.GetRevisionFile(
      arguments.revisions_dir,
      # [elided: package_name argument]
  pynacl.file_tools.MakeParentDirectoryIfAbsent(revision_file)
  revision_desc.SaveRevisionFile(revision_file)

  CleanTempFiles(arguments.revisions_dir)
def _GetRevisionCmdArgParser(subparser):
  """Registers command line arguments for the 'getrevision' command.

  NOTE(review): elided listing — one argument line (likely "required=True,")
  is missing.
  """
  subparser.description = 'Get the revision of a package.'
  subparser.add_argument(
    '--revision-package', metavar='NAME', dest='getrevision__package',
    # [elided: presumably "required=True,"]
    help='Package name to get revision of.')
def _DoGetRevisionCmd(arguments):
  """Executes the 'getrevision' command: prints a package's set revision.

  NOTE(review): elided listing — trailing call arguments are missing.
  """
  package_name = arguments.getrevision__package

  # Also accept "$PACKAGE_TARGET/$PACKAGE" style names.
  custom_package_targets = GetPackageTargetPackages(package_name, [])
  if custom_package_targets:
    custom_target, package_name = custom_package_targets[0]

  revision_file = package_locations.GetRevisionFile(arguments.revisions_dir,
      # [elided: package_name argument]
  if not os.path.isfile(revision_file):
    raise NameError('No revision set for package: %s.' % package_name)

  revision_desc = revision_info.RevisionInfo(arguments.packages_desc,
      # [elided: revision_file argument]
  print revision_desc.GetRevisionNumber()
def _FillEmptyTarsParser(subparser):
  """Registers command line arguments for the 'fillemptytars' command.

  NOTE(review): elided listing — one argument line (likely "required=True,")
  is missing.
  """
  subparser.description = 'Fill missing archives with empty ones in a package.'
  subparser.add_argument(
    '--fill-package', metavar='NAME', dest='fillemptytars_package',
    # [elided: presumably "required=True,"]
    help='Package name to fill empty archives of.')
def _DoFillEmptyTarsCmd(arguments):
  """Executes the 'fillemptytars' command.

  Replaces archives that are missing on disk with freshly created empty tar
  files so the package description stays complete.

  NOTE(review): elided listing — several call arguments, the tar-mode
  assignments, an "else:" header and the tar close call are missing;
  "[elided: ...]" markers flag each gap.
  """
  package_target_packages = GetPackageTargetPackages(
      arguments.fillemptytars_package,
      arguments.package_target_packages
  # [elided: closing ")"]
  if not package_target_packages:
    raise NameError('Unknown package: %s.' % arguments.fillemptytars_package
                    + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in package_target_packages:
    package_path = package_locations.GetLocalPackageFile(arguments.tar_dir,
        # [elided: package_target/package_name arguments]
    # skip_missing so absent archives still appear in the description.
    package_desc = package_info.PackageInfo(package_path, skip_missing=True)
    output_package_desc = package_info.PackageInfo()
    for archive in package_desc.GetArchiveList():
      # If archive does not exist, fill it with an empty one.
      archive_data = archive.GetArchiveData()
      if archive_data.hash:
        # Archive exists (has a hash); keep it as-is.
        output_package_desc.AppendArchive(archive)
      # [elided: "else:"]
        logging.info('Filling missing archive: %s.', archive_data.name)
        # Pick the tar write-mode from the archive extension; the mode
        # assignments themselves are elided from this listing.
        if (archive_data.name.endswith('.tar.gz') or
            archive_data.name.endswith('.tgz')):
          # [elided: presumably "mode = 'w:gz'"]
        elif archive_data.name.endswith('.bz2'):
          # [elided: presumably "mode = 'w:bz2'"]
        elif archive_data.name.endswith('.tar'):
          # [elided: presumably "mode = 'w:'" and a final "else:"]
          raise NameError('Unknown archive type: %s.' % archive_data.name)

        archive_file = package_locations.GetLocalPackageArchiveFile(
            # [elided: arguments]
        # Create the empty tar and hash it for the new description.
        tar_file = cygtar.CygTar(archive_file, mode)
        # [elided: presumably "tar_file.Close()"]
        tar_hash = archive_info.GetArchiveHash(archive_file)

        empty_archive = archive_info.ArchiveInfo(name=archive_data.name,
                                                 archive_hash=tar_hash)
        output_package_desc.AppendArchive(empty_archive)

    output_package_desc.SavePackageFile(package_path)
# Table mapping each sub-command name to its pair of hooks:
# parse_func sets up the sub-command's argparse options and do_cmd_func
# executes the command with the parsed arguments.
CommandFuncs = collections.namedtuple(
    # [elided: typename string argument, presumably 'CommandFuncs']
    ['parse_func', 'do_cmd_func'])


# [elided: "COMMANDS = {" opener]
  'list': CommandFuncs(_ListCmdArgParser, _DoListCmd),
  'archive': CommandFuncs(_ArchiveCmdArgParser, _DoArchiveCmd),
  'extract': CommandFuncs(_ExtractCmdArgParser, _DoExtractCmd),
  'upload': CommandFuncs(_UploadCmdArgParser, _DoUploadCmd),
  'sync': CommandFuncs(_SyncCmdArgParser, _DoSyncCmd),
  'setrevision': CommandFuncs(_SetRevisionCmdArgParser, _DoSetRevisionCmd),
  'getrevision': CommandFuncs(_GetRevisionCmdArgParser, _DoGetRevisionCmd),
  'fillemptytars': CommandFuncs(_FillEmptyTarsParser, _DoFillEmptyTarsCmd),
# [elided: closing "}"]
def ParseArgs(args):
  """Parse command line arguments and global options into a namespace.

  Args:
    args: List of command line argument strings (typically sys.argv[1:]).
  Returns:
    An argparse.Namespace for the parsed arguments, augmented with:
      packages_desc: PackagesInfo parsed from the packages JSON file.
      package_target_packages: list of (package_target, package) tuples for
          every package being operated on.
      gsd_store: GSDStorage object for cloud upload/download.
  Raises:
    NameError: if a package target has no packages defined, or a custom
        package is not in $PACKAGE_TARGET/$PACKAGE form.
  """
  parser = argparse.ArgumentParser()

  host_platform = pynacl.platform.GetOS()
  host_arch = pynacl.platform.GetArch3264()

  # List out global options for all commands.
  parser.add_argument(
      '-v', '--verbose', dest='verbose',
      action='store_true', default=False,
      help='Verbose output')
  parser.add_argument(
      '-q', '--quiet', dest='quiet',
      action='store_true', default=False,
      help='Quiet output')
  parser.add_argument(
      '--platform', dest='host_platform',
      default=host_platform,
      help='Custom platform other than the current (%s).' % host_platform)
  parser.add_argument(
      '--arch', dest='host_arch',
      default=host_arch,
      help='Custom architecture other than the current (%s).' % host_arch)
  parser.add_argument(
      '--package-targets', dest='package_targets',
      default=None,
      help='Custom package targets specified as comma separated names. Defaults'
      ' to package targets defined for host platform and architecture inside'
      ' of the packages json file.')
  parser.add_argument(
      '--packages', dest='packages',
      default=None,
      help='Custom packages specified as comma separated package names. Custom'
      ' packages not defined by the packages json file must be prefixed by'
      ' the package_target directory (IE. $PACKAGE_TARGET/$PACKAGE).')
  parser.add_argument(
      '--append', metavar='PACKAGE', dest='append_packages',
      action='append', default=[],
      help='Append extra package to current list of packages.')
  parser.add_argument(
      '--exclude', metavar='PACKAGE', dest='exclude_packages',
      action='append', default=[],
      help='Exclude package from current list of packages.')
  parser.add_argument(
      '--packages-json', dest='packages_json',
      default=DEFAULT_PACKAGES_JSON, type=argparse.FileType('rt'),
      help='Packages description file.'
      ' [Default: %s]' % DEFAULT_PACKAGES_JSON)
  parser.add_argument(
      '--revisions-dir', dest='revisions_dir',
      default=DEFAULT_REVISIONS_DIR,
      help='Revisions directory where packages revisions will be found.')
  parser.add_argument(
      '--dest-dir', dest='dest_dir',
      default=DEFAULT_DEST_DIR,
      help='Destination directory where all the packages will be extracted to.')
  parser.add_argument(
      '--tar-dir', dest='tar_dir',
      default=None,
      help='Directory for package archive files. Defaults to "$DEST-DIR/.tars".')
  parser.add_argument(
      '--annotate', dest='annotate',
      action='store_true', default=False,
      help='Print out build bot annotations.')
  parser.add_argument(
      '--cloud-bucket', dest='cloud_bucket',
      default=DEFAULT_CLOUD_BUCKET,
      help='Google storage cloud bucket name.'
      ' [Default: %s]' % DEFAULT_CLOUD_BUCKET)

  # Add subparsers for all commands. These are flags for specific commands,
  # IE. [options] command [command-options]
  command_parser = parser.add_subparsers(title='command', dest='command')
  for command, cmd_funcs in COMMANDS.iteritems():
    sub_parser = command_parser.add_parser(command)
    cmd_funcs.parse_func(sub_parser)

  arguments = parser.parse_args(args)
  pynacl.log_tools.SetupLogging(arguments.verbose, quiet=arguments.quiet)
  # --tar-dir defaults to a ".tars" directory under the destination directory.
  if arguments.tar_dir is None:
    arguments.tar_dir = os.path.join(arguments.dest_dir, '.tars')

  # Parse the package description up front and store it into the arguments
  # object. Almost all the commands need to use this information.
  packages_desc = packages_info.PackagesInfo(arguments.packages_json)
  arguments.packages_desc = packages_desc

  # Based on the host platform and host architecture, we can determine the set
  # of package targets used from the packages description. Minimize platform
  # and architecture errors by standardizing the names using pynacl.platform.
  if arguments.package_targets is None:
    package_targets = packages_desc.GetPackageTargets(
        pynacl.platform.GetOS(arguments.host_platform),
        pynacl.platform.GetArch3264(arguments.host_arch))
  else:
    package_targets = arguments.package_targets.split(',')

  # If the packages argument were not set, use the default list of packages
  # for each package target.
  packages_set = set()
  if arguments.packages is None:
    for package_target in package_targets:
      packages = packages_desc.GetPackages(package_target)
      if packages is None:
        raise NameError('No packages defined for Package Target: %s.' %
                        package_target)
      packages_set.update(packages)
  else:
    packages_set.update(arguments.packages.split(','))

  # Append/exclude any extra packages that were specified.
  packages_set.update(arguments.append_packages)
  packages_set.difference_update(arguments.exclude_packages)

  # Build a dictionary that organizes packages to their respective package
  # targets. Packages may exist in multiple package targets so we will have
  # to have the key be package and value be a list of package targets.
  package_targets_dict = collections.defaultdict(list)
  for package_target in package_targets:
    for package in packages_desc.GetPackages(package_target):
      package_targets_dict[package].append(package_target)

  # Use the list of packages to determine the set of package target packages
  # we are operating on, custom package targets will have the package target
  # inside of the name of the package name (see help for "--packages" argument).
  # The package_target_packages is a list of tuples (package_target, package),
  # for every package along with the associated package target.
  package_target_packages = []
  for package in sorted(packages_set):
    package_targets = package_targets_dict.get(package, None)
    if package_targets is None:
      custom_package_targets = GetPackageTargetPackages(package, [])
      if not custom_package_targets:
        # NOTE(review): the tail of this error message was reconstructed from a
        # truncated paste -- verify exact wording against upstream.
        raise NameError('Invalid custom package: "%s".' % package
                        + ' Expected $PACKAGE_TARGET'
                        + '/$PACKAGE format.')
      package_target_packages.extend(custom_package_targets)
    else:
      for package_target in package_targets:
        package_target_packages.append((package_target, package))

  arguments.package_target_packages = package_target_packages

  # Create a GSD Storage object for those who need it.
  cloud_bucket = arguments.cloud_bucket
  gsd_store = pynacl.gsd_storage.GSDStorage(cloud_bucket, [cloud_bucket])
  arguments.gsd_store = gsd_store

  return arguments
def main(args):
  """Entry point: parse arguments and dispatch to the selected command.

  Args:
    args: Command line arguments, excluding the program name.
  Returns:
    The return value of the executed command's do_cmd_func (used as the
    process exit code by the __main__ guard).
  """
  arguments = ParseArgs(args)
  return COMMANDS[arguments.command].do_cmd_func(arguments)
if __name__ == '__main__':
  # Strip the program name and use the command's return value as exit status.
  sys.exit(main(sys.argv[1:]))