2 # Copyright (c) 2014 The Native Client Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 """This script handles all of the processing for versioning packages.
8 package_version.py manages all of the various operations done between
9 packages, including archiving, extracting, uploading, and downloading
10 packages. For a list of options and commands, see the help for the script.
13 Package: A list of archives, such as "nacl_x86_glibc" or "nacl_x86_newlib".
14 Package Archive: An archive (usually a tar file) that is part of a package.
Package Target: Package targets consist of packages. Each package target
  has its own version of a package. An example of a package target would
17 be something such as "win_x86_nacl_x86" or "mac_x86_nacl_x86". In that case,
18 "win_x86_nacl_x86" and "mac_x86_nacl_x86" would each have their own version
19 of "nacl_x86_glibc" and "nacl_x86_newlib" for windows and mac respectively.
20 Revision Number: The SVN revision number of a sanctioned version. This number
21 is used to synchronize packages to sanctioned versions.
24 Packages File - A file which describes the various package targets for each
25 platform/architecture along with the packages associated with each package
27 [Default file: build/package_version/standard_packages.json].
28 Package File - A file which contains the list of package archives within
30 [Default file: toolchain/.tars/$PACKAGE_TARGET/$PACKAGE.json]
31 Archive File - A file which describes an archive within a package. Each
32 archive description file will contain information about an archive such
33 as name, URL to download from, and hash.
34 [Default File: toolchain/.tars/$PACKAGE_TARGET/$PACKAGE/$ARCHIVE.json]
35 Revision File - A file which describes the sanctioned version of package
36 for each of the package targets associated with it.
37 [Default file: toolchain_revisions/$PACKAGE.json]
47 SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
48 sys.path.append(os.path.dirname(SCRIPT_DIR))
51 sys.path.append(os.path.dirname(os.path.dirname(SCRIPT_DIR)))
52 import pynacl.file_tools
53 import pynacl.gsd_storage
54 import pynacl.log_tools
55 import pynacl.platform
56 import pynacl.working_directory
61 import package_locations
# Directory layout anchors, derived from this script's own location so the
# script works from any checkout path.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_DIR = os.path.dirname(CURRENT_DIR)
NACL_DIR = os.path.dirname(BUILD_DIR)

# Default locations/names used by the command-line options below.
DEFAULT_PACKAGES_JSON = os.path.join(CURRENT_DIR, 'standard_packages.json')
DEFAULT_REVISIONS_DIR = os.path.join(NACL_DIR, 'toolchain_revisions')
DEFAULT_DEST_DIR = os.path.join(NACL_DIR, 'toolchain')
DEFAULT_CLOUD_BUCKET = 'nativeclient-archive2'
78 # These are helper functions that help each command.
def CleanTempFiles(directory):
  """Cleans up all temporary files ending with TEMP_SUFFIX in a directory.

  Args:
    directory: Root directory; the entire tree beneath it is walked.
  """
  for root, dirs, files in os.walk(directory):
    for file_name in files:
      if file_name.endswith(TEMP_SUFFIX):
        file_path = os.path.join(root, file_name)
def GetPackageTargetPackages(custom_package_name, package_target_packages):
  """Returns a list of package target packages given a custom package name.

  A custom package name can either have a specified package target attached
  to it (IE. $PACKAGE_TARGET/PACKAGE_NAME) or be extracted out of a default
  list of package targets along with their packages.

  Args:
    custom_package_name: Package name with an optional package target.
    package_target_packages: List of tuples (package_target, package).
  Returns:
    List of package target tuples matching the package name.
  """
  # Normalize both separator styles so "target/package" and "target\package"
  # both work regardless of host OS.
  package_path = custom_package_name.replace('\\', os.path.sep)
  package_path = package_path.replace('/', os.path.sep)
  if os.path.sep in package_path:
    # Package target is part of the custom package name, just return it.
    package_target, package_name = package_path.split(os.path.sep, 1)
    return [(package_target, package_name)]

  # Package target is not part of the package name, filter from list of passed
  # in package targets.
      (package_target, package)
      for package_target, package in package_target_packages
      if package == custom_package_name
def DownloadPackageArchives(tar_dir, package_target, package_name, package_desc,
                            downloader=None, revision_num=None,
  """Downloads package archives from the cloud to the tar directory.

  Args:
    tar_dir: Root tar directory where archives will be downloaded to.
    package_target: Package target of the package to download.
    package_name: Package name of the package to download.
    package_desc: package_info object of the package to download.
    downloader: function which takes a url and a file path for downloading.
  Returns:
    The list of files that were downloaded.
  """
  downloaded_files = []
  if downloader is None:
    # Default to plain HTTP download when no custom downloader is supplied.
    downloader = pynacl.gsd_storage.HttpDownload

  local_package_file = package_locations.GetLocalPackageFile(tar_dir,

  # To ensure that we do not redownload extra archives that we already have,
  # create a dictionary of old package archives that contains the hash of each
  if os.path.isfile(local_package_file):
      old_package_desc = package_info.PackageInfo(local_package_file)
      old_archives_list = old_package_desc.GetArchiveList()
      old_archive_names = [archive.GetArchiveData().name
                           in old_archives_list]
      for archive_name in old_archive_names:
        archive_file = package_locations.GetLocalPackageArchiveFile(

        # Only trust archives whose hash can actually be computed.
        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash is not None:
          old_archives[archive_name] = archive_hash
      # Nothing can be trusted here anymore, delete all package archives.
      archive_directory = package_locations.GetLocalPackageArchiveDir(
      os.unlink(local_package_file)
      pynacl.file_tools.RemoveDir(archive_directory)

  # Download packages information file along with each of the package
  # archives described in the information file. Also keep track of what
  # new package names matches old package names. We will have to delete
  # stale package names after we are finished.
  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    old_hash = old_archives.get(archive_desc.name, None)
    if old_hash is not None:
      old_archives.pop(archive_desc.name)
      if archive_desc.hash == old_hash:
        # Identical hash already on disk; skip the re-download.
        logging.debug('Skipping matching archive: %s', archive_desc.name)
    update_archives.append(archive_obj)

    logging.info('--Syncing %s to revision %s--' % (package_name, revision_num))
  num_archives = len(update_archives)
  for index, archive_obj in enumerate(update_archives):
    archive_desc = archive_obj.GetArchiveData()
    local_archive_file = package_locations.GetLocalPackageArchiveFile(
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)

    if archive_desc.url is None:
      raise error.Error('Error, no URL for archive: %s' % archive_desc.name)

    logging.info('Downloading package archive: %s (%d/%d)' %
                 (archive_desc.name, index+1, num_archives))
      downloader(archive_desc.url, local_archive_file)
    except Exception as e:
      raise error.Error('Could not download URL (%s): %s' %
                        (archive_desc.url, e))

    # Delete any stale log files
    local_archive_log = package_locations.GetLocalPackageArchiveLogFile(
    if os.path.isfile(local_archive_log):
      os.unlink(local_archive_log)

    # Verify the freshly downloaded archive against the expected hash.
    verified_hash = archive_info.GetArchiveHash(local_archive_file)
    if verified_hash != archive_desc.hash:
      raise error.Error('Package hash check failed: %s != %s' %
                        (verified_hash, archive_desc.hash))

    downloaded_files.append(local_archive_file)

  # Download any logs if include_logs is True.
    for archive_obj in package_desc.GetArchiveList():
      archive_desc = archive_obj.GetArchiveData()
      if archive_desc.log_url:
        local_archive_file = package_locations.GetLocalPackageArchiveFile(
        local_archive_log = package_locations.GetLocalPackageArchiveLogFile(
        # Only fetch logs that are not already present locally.
        if not os.path.isfile(local_archive_log):
          download_log_tuple = (archive_desc.name,
                                archive_desc.log_url,
          download_logs.append(download_log_tuple)

      logging.info('--Syncing %s Logs--' % (package_name))
      num_logs = len(download_logs)
      for index, download_log_tuple in enumerate(download_logs):
        name, log_url, local_log_file = download_log_tuple
        logging.info('Downloading archive log: %s (%d/%d)' %
                     (name, index+1, num_logs))
          downloader(log_url, local_log_file)
        except Exception as e:
          raise IOError('Could not download log URL (%s): %s' %

  # Delete any stale left over packages.
  for old_archive in old_archives:
    archive_file = package_locations.GetLocalPackageArchiveFile(
    os.unlink(archive_file)

    archive_log = package_locations.GetLocalPackageArchiveLogFile(archive_file)
    if os.path.isfile(archive_log):
      os.unlink(archive_log)

  # Save the package file so we know what we currently have.
  if update_archives or old_archives:
    package_desc.SavePackageFile(local_package_file)

  return downloaded_files
def ArchivePackageArchives(tar_dir, package_target, package_name, archives,
  """Archives local package archives to the tar directory.

  Args:
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    archives: List of archive file paths where archives currently live.
    extra_archives: Extra archives that are expected to be built elsewhere.
  Returns:
    Returns the local package file that was archived.
  """
  local_package_file = package_locations.GetLocalPackageFile(tar_dir,

  valid_archive_files = set()

  package_desc = package_info.PackageInfo()
  # Regular archives must exist locally; extra archives (built elsewhere)
  # are flagged so missing files are tolerated.
  package_archives = ([(archive, False) for archive in archives] +
                      [(archive, True) for archive in extra_archives])
  for archive, skip_missing in package_archives:
    archive_log_url = None
    # Archive spec format: TAR(,SRCDIR(:EXTRACTDIR))(@URL(,LOGURL)).
      archive, archive_url = archive.split('@', 1)
      if ',' in archive_url:
        archive_url, archive_log_url = archive_url.split(',', 1)
      archive, extract_param = archive.split(',', 1)
      if ':' in extract_param:
        tar_src_dir, extract_dir = extract_param.split(':', 1)
        tar_src_dir = extract_param

    archive_hash = archive_info.GetArchiveHash(archive)
    archive_name = os.path.basename(archive)
    archive_desc = archive_info.ArchiveInfo(name=archive_name,
                                            tar_src_dir=tar_src_dir,
                                            extract_dir=extract_dir,
                                            log_url=archive_log_url)
    package_desc.AppendArchive(archive_desc)

    # A None hash means the file is absent on disk.
    if archive_hash is None:
        logging.info('Skipping archival of missing file: %s', archive)
      raise error.Error('Invalid package: %s.' % archive)
    archive_list.append(archive)

    # Track the file names that should survive the stale-file cleanup below.
    archive_basename = os.path.basename(archive)
    archive_json = archive_basename + '.json'
    valid_archive_files.update([archive_basename, archive_json])

  # Delete any stale archive files
  local_archive_dir = package_locations.GetLocalPackageArchiveDir(
  if os.path.isdir(local_archive_dir):
    for dir_item in os.listdir(local_archive_dir):
      if dir_item in valid_archive_files:

      item_path = os.path.join(local_archive_dir, dir_item)
      if os.path.isdir(item_path):
        pynacl.file_tools.RemoveDir(item_path)
        pynacl.file_tools.RemoveFile(item_path)

  # We do not need to archive the package if it already matches. But if the
  # local package file is invalid or does not match, then we should recreate
  if os.path.isfile(local_package_file):
      current_package_desc = package_info.PackageInfo(local_package_file,
      if current_package_desc == package_desc:

  # Copy each of the packages over to the tar directory first.
  for archive_file in archive_list:
    archive_name = os.path.basename(archive_file)
    local_archive_file = package_locations.GetLocalPackageArchiveFile(
    logging.info('Archiving file: %s', archive_file)
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)
    shutil.copyfile(archive_file, local_archive_file)

  # Once all the copying is completed, update the local packages file.
  logging.info('Package "%s" archived: %s', package_name, local_package_file)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(local_package_file)
  package_desc.SavePackageFile(local_package_file)

  return local_package_file
def UploadPackage(storage, revision, tar_dir, package_target, package_name,
                  is_shared_package, annotate=False, skip_missing=False,
                  custom_package_file=None):
  """Uploads a local package file to the supplied cloud storage object.

  By default local package files are expected to be found in the standardized
  location within the tar directory, however a custom package file may be
  specified to upload from a different location. Package archives that do not
  have their URL field set will automatically have the archives uploaded so
  that someone accessing the package file from the cloud storage will also
  have access to the package archives.

  Args:
    storage: Cloud storage object which supports PutFile and GetFile.
    revision: SVN Revision number the package should be associated with.
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    is_shared_package: Is this package shared among all package targets?
    annotate: Print annotations for build bots?
    skip_missing: Skip missing package archive files?
    custom_package_file: File location for a custom package file.
  Returns:
    Returns remote download key for the uploaded package file.
  """
  if custom_package_file is not None:
    local_package_file = custom_package_file
    local_package_file = package_locations.GetLocalPackageFile(

  # Upload the package file and also upload any local package archives so
  # that they are downloadable.
  package_desc = package_info.PackageInfo(local_package_file,
                                          skip_missing=skip_missing)
  upload_package_desc = package_info.PackageInfo()

  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    url = archive_desc.url
    # Archives with a hash but no URL are not yet in the cloud: upload them.
    if archive_desc.hash and url is None:
        # Buildbot annotation marking the start of an upload step.
        print '@@@BUILD_STEP Archive:%s (upload)@@@' % archive_desc.name

      archive_file = package_locations.GetLocalPackageArchiveFile(
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash is None:
        raise error.Error('Missing Archive File: %s' % archive_file)
      elif archive_hash != archive_desc.hash:
            'Archive hash does not match package hash: %s' % archive_file
            + '\n Archive Hash: %s' % archive_hash
            + '\n Package Hash: %s' % archive_desc.hash)

      logging.warn('Missing archive URL: %s', archive_desc.name)
      logging.warn('Uploading archive to be publically available...')
      remote_archive_key = package_locations.GetRemotePackageArchiveKey(
      url = storage.PutFile(archive_file, remote_archive_key, clobber=True)
        print '@@@STEP_LINK@download@%s@@@' % url

    # Record the (possibly newly uploaded) URL in the upload descriptor.
    updated_archive_obj = archive_obj.Copy(url=url)
    upload_package_desc.AppendArchive(updated_archive_obj)

  upload_package_file = local_package_file + '.upload'
  pynacl.file_tools.MakeParentDirectoryIfAbsent(upload_package_file)
  upload_package_desc.SavePackageFile(upload_package_file)

  logging.info('Uploading package information: %s', package_name)
  remote_package_key = package_locations.GetRemotePackageKey(
  package_info.UploadPackageInfoFiles(storage, package_target, package_name,
                                      remote_package_key, upload_package_file,
                                      skip_missing=skip_missing,

  return remote_package_key
def ExtractPackageTargets(package_target_packages, tar_dir, dest_dir,
                          downloader=None, skip_missing=False, quiet=False):
  """Extracts package targets from the tar directory to the destination.

  Each package archive within a package will be verified before being
  extracted. If a package archive does not exist or does not match the hash
  stored within the package file, it will be re-downloaded before being

  Args:
    package_target_packages: List of tuples of package target and package names.
    tar_dir: Source tar directory where package archives live.
    dest_dir: Root destination directory where packages will be extracted to.
    downloader: function which takes a url and a file path for downloading.
  """
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload

  for package_target, package_name in package_target_packages:
    package_file = package_locations.GetLocalPackageFile(tar_dir,
    package_desc = package_info.PackageInfo(package_file,
                                            skip_missing=skip_missing)
    dest_package_dir = package_locations.GetFullDestDir(dest_dir,
    dest_package_file = package_locations.GetDestPackageFile(dest_dir,

    # Only do the extraction if the extract packages do not match.
    if os.path.isfile(dest_package_file):
        dest_package_desc = package_info.PackageInfo(dest_package_file)
        if dest_package_desc == package_desc:
          logging.debug('Skipping extraction for package (%s)', package_name)
        # Destination package file cannot be trusted, if invalid re-extract.

      # Delete the old package file before we extract.
      os.unlink(dest_package_file)

    if os.path.isdir(dest_package_dir):
      logging.debug('Deleting old package directory: %s', dest_package_dir)
      pynacl.file_tools.RemoveDir(dest_package_dir)

    logging.info('Extracting package (%s) to directory: %s',
                 package_name, dest_package_dir)
    archive_list = package_desc.GetArchiveList()
    num_archives = len(archive_list)
    for index, archive_obj in enumerate(archive_list):
      archive_desc = archive_obj.GetArchiveData()
      archive_file = package_locations.GetLocalPackageArchiveFile(

      # Upon extraction, some files may not be downloaded (or have stale files),
      # we need to check the hash of each file and attempt to download it if
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash != archive_desc.hash:
        if archive_desc.url is None:
            logging.info('Skipping extraction of missing archive: %s' %
          raise error.Error('Invalid archive file and URL: %s' % archive_file)

        logging.warn('Expected archive missing, downloading: %s',
        pynacl.file_tools.MakeParentDirectoryIfAbsent(archive_file)
        downloader(archive_desc.url, archive_file)
        # Re-verify after the download; a mismatch here is fatal.
        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash != archive_desc.hash:
          raise error.Error('Downloaded archive file does not match hash.'
                            ' [%s] Expected %s, received %s.' %
                            (archive_file, archive_desc.hash, archive_hash))

      destination_dir = os.path.join(dest_package_dir, archive_desc.extract_dir)
      logging.info('Extracting %s (%d/%d)' %
                   (archive_desc.name, index+1, num_archives))

      # Untar into a temporary subdirectory, then merge the tar's source
      # directory into the real destination and clean the temp dir up.
      temp_dir = os.path.join(destination_dir, '.tmp')
      pynacl.file_tools.RemoveDir(temp_dir)
      os.makedirs(temp_dir)
      tar_output = not quiet
      tar = cygtar.CygTar(archive_file, 'r:*', verbose=tar_output)
      temp_src_dir = os.path.join(temp_dir, archive_desc.tar_src_dir)
      pynacl.file_tools.MoveAndMergeDirTree(temp_src_dir, destination_dir)
      pynacl.file_tools.RemoveDir(temp_dir)

    # Record the extracted state so future runs can skip matching packages.
    pynacl.file_tools.MakeParentDirectoryIfAbsent(dest_package_file)
    package_desc.SavePackageFile(dest_package_file)
592 # Each Command has 2 functions that describes it:
593 # 1. A parser function which specifies the extra command options each command
595 # 2. An execution function which is called when a user actually executes
def _ListCmdArgParser(subparser):
  # The 'list' command takes no extra command-specific options.
  subparser.description = 'Lists package information.'
def _DoListCmd(arguments):
  # Group the requested packages by their package target for display.
  package_targets = collections.defaultdict(list)
  for package_target, package in arguments.package_target_packages:
    package_targets[package_target].append(package)

  print 'Listing Package Targets and Packages:'
  for package_target, packages in package_targets.iteritems():
    print '\n%s:' % package_target
    for package in sorted(packages):
def _ArchiveCmdArgParser(subparser):
  # Options for the 'archive' command (dest names are prefixed 'archive__').
  subparser.description = 'Archive package archives to tar directory.'
  subparser.add_argument(
    '--archive-package', metavar='NAME', dest='archive__package',
    help='Package name archives will be packaged into.')
  subparser.add_argument(
    '--extra-archive', metavar='ARCHIVE', dest='archive__extra_archive',
    action='append', default=[],
    help='Extra archives that are expected to be built elsewhere.')
  subparser.add_argument(
    metavar='TAR(,SRCDIR(:EXTRACTDIR))(@URL,LOGURL)', dest='archive__archives',
    help='Package archive with an optional tar information and url.'
         ' SRCDIR is the root directory where files live inside of the tar.'
         ' EXTRACTDIR is the directory to extract files to relative to the'
         ' destination directory. The URL is where the package can be'
  subparser.add_argument(
    '-x', '--extract', dest='archive__extract',
    action='store_true', default=False,
    help='Extract package archives after they have been archived.')
def _DoArchiveCmd(arguments):
  # Resolve the custom package name into concrete (target, package) pairs.
  package_target_packages = GetPackageTargetPackages(
      arguments.archive__package,
      arguments.package_target_packages
  if not package_target_packages:
    raise error.Error('Unknown package: %s.' % arguments.archive__package
                      + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in package_target_packages:
    ArchivePackageArchives(arguments.tar_dir,
                           arguments.archive__archives,
                           extra_archives=arguments.archive__extra_archive)

    # Optionally extract immediately after archiving.
    if arguments.archive__extract:
      ExtractPackageTargets([(package_target, package_name)],
                            quiet=arguments.quiet)
def _ExtractCmdArgParser(subparser):
  # Options for the 'extract' command (dest names are prefixed 'extract__').
  subparser.description = 'Extract packages from tar directory.'
  subparser.add_argument(
    '--skip-missing', dest='extract__skip_missing',
    action='store_true', default=False,
    help='Skip missing archive files when extracting rather than erroring out.')
def _DoExtractCmd(arguments):
  # Thin wrapper: forward parsed arguments to ExtractPackageTargets.
  ExtractPackageTargets(
      arguments.package_target_packages,
      skip_missing=arguments.extract__skip_missing,
      quiet=arguments.quiet)
def _UploadCmdArgParser(subparser):
  # Options for the 'upload' command (dest names are prefixed 'upload__').
  subparser.description = 'Upload a package file.'
  subparser.add_argument(
    '--upload-package', metavar='NAME', dest='upload__package', required=True,
    help='Package to upload.')
  subparser.add_argument(
    '--revision', metavar='NUM', dest='upload__revision', required=True,
    help='SVN Revision of the package to upload.')
  subparser.add_argument(
    '--package-file', metavar='FILE', dest='upload__file',
    help='Use custom package file instead of standard package file found'
         ' in the tar directory.')
  subparser.add_argument(
    '--skip-missing', dest='upload__skip_missing',
    action='store_true', default=False,
    help='Skip missing archive files when uploading package archives.')
def _DoUploadCmd(arguments):
  # Resolve the custom package name into concrete (target, package) pairs.
  package_target_packages = GetPackageTargetPackages(
      arguments.upload__package,
      arguments.package_target_packages
  if not package_target_packages:
    raise error.Error('Unknown package: %s.' % arguments.upload__package
                      + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in package_target_packages:
      arguments.upload__revision,
      arguments.packages_desc.IsSharedPackage(package_name),
      annotate=arguments.annotate,
      skip_missing=arguments.upload__skip_missing,
      custom_package_file=arguments.upload__file
def _SyncCmdArgParser(subparser):
  # Options for the 'sync' command (dest names are prefixed 'sync__').
  subparser.description = 'Download package archives to the tar directory.'
  subparser.add_argument(
    '--revision', metavar='NUM', dest='sync__revision',
    help='SVN Revision of the packages to download.')
  subparser.add_argument(
    '--include-logs', dest='sync__include_logs',
    action='store_true', default=False,
    help='Also download logs next to each archive if available.')
  subparser.add_argument(
    '-x', '--extract', dest='sync__extract',
    action='store_true', default=False,
    help='Extract package archives after they have been downloaded.')
def _DoSyncCmd(arguments):
  # Sync each requested (package_target, package) pair to its revision.
  for package_target, package_name in arguments.package_target_packages:
    if arguments.sync__revision is None:
      # When the sync revision number is not specified, use the set
      # revision number found in the revision directory.
      revision_file = package_locations.GetRevisionFile(
          arguments.revisions_dir,
      revision_desc = revision_info.RevisionInfo(
          arguments.packages_desc,
      package_desc = revision_desc.GetPackageInfo(package_target)
      revision_num = revision_desc.GetRevisionNumber()
      # When the sync revision number is specified, find the package to
      # download remotely using the revision.
      revision_num = arguments.sync__revision
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
          arguments.sync__revision,

      # Fetch the remote package info into a temp working directory first.
      with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
        temp_package_file = os.path.join(
            os.path.basename(remote_package_key) + TEMP_SUFFIX)

        package_info.DownloadPackageInfoFiles(
            downloader=arguments.gsd_store.GetFile)

        package_desc = package_info.PackageInfo(temp_package_file)

    DownloadPackageArchives(
        revision_num=revision_num,
        include_logs=arguments.sync__include_logs)

  CleanTempFiles(arguments.tar_dir)

  # Optionally extract everything that was just synced.
  if arguments.sync__extract:
    ExtractPackageTargets(
        arguments.package_target_packages,
        quiet=arguments.quiet)
def _SetRevisionCmdArgParser(subparser):
  # Options for 'setrevision' (dest names are prefixed 'setrevision__').
  subparser.description = 'Specify the revision of a package.'
  subparser.add_argument(
    '--revision-package', metavar='NAME', dest='setrevision__package',
    help='Package name to set revision of.')
  subparser.add_argument(
    '--revision', metavar='NUM', dest='setrevision__revision',
    type=int, required=True,
    help='SVN Revision of the package to set.')
def _DoSetRevisionCmd(arguments):
  package_name = arguments.setrevision__package
  revision_num = arguments.setrevision__revision

  revision_desc = revision_info.RevisionInfo(arguments.packages_desc)
  revision_desc.SetRevisionNumber(revision_num)

  # A name of the form "$PACKAGE_TARGET/$PACKAGE" pins a single target;
  # otherwise all package targets for the package are updated.
  custom_package_targets = GetPackageTargetPackages(package_name, [])
  if not custom_package_targets:
    package_targets = arguments.packages_desc.GetPackageTargetsForPackage(
    package_targets = [target[0] for target in custom_package_targets]
    first_target = custom_package_targets[0]
    package_name = first_target[1]

  with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
    for package_target in package_targets:
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
      temp_package_file = os.path.join(
          os.path.basename(remote_package_key) + TEMP_SUFFIX)

      package_info.DownloadPackageInfoFiles(
          downloader=arguments.gsd_store.GetFile)

      package_desc = package_info.PackageInfo(temp_package_file)

      logging.info('Setting %s:%s to revision %s',
                   package_target, package_name, revision_num)
      revision_desc.SetTargetRevision(

  # Persist the updated revision description to the revisions directory.
  revision_file = package_locations.GetRevisionFile(
      arguments.revisions_dir,
  pynacl.file_tools.MakeParentDirectoryIfAbsent(revision_file)
  revision_desc.SaveRevisionFile(revision_file)

  CleanTempFiles(arguments.revisions_dir)
def _GetRevisionCmdArgParser(subparser):
  # Options for 'getrevision' (dest names are prefixed 'getrevision__').
  subparser.description = 'Get the revision of a package.'
  subparser.add_argument(
    '--revision-package', metavar='NAME', dest='getrevision__package',
    help='Package name to get revision of.')
def _DoGetRevisionCmd(arguments):
  package_name = arguments.getrevision__package

  # Allow "$PACKAGE_TARGET/$PACKAGE" names; only the package part names
  # the revision file.
  custom_package_targets = GetPackageTargetPackages(package_name, [])
  if custom_package_targets:
    custom_target, package_name = custom_package_targets[0]

  revision_file = package_locations.GetRevisionFile(arguments.revisions_dir,

  if not os.path.isfile(revision_file):
    raise error.Error('No revision set for package: %s.' % package_name)

  revision_desc = revision_info.RevisionInfo(arguments.packages_desc,
  print revision_desc.GetRevisionNumber()
def _FillEmptyTarsParser(subparser):
  # Options for 'fillemptytars' (dest names prefixed 'fillemptytars_').
  subparser.description = 'Fill missing archives with empty ones in a package.'
  subparser.add_argument(
    '--fill-package', metavar='NAME', dest='fillemptytars_package',
    help='Package name to fill empty archives of.')
def _DoFillEmptyTarsCmd(arguments):
  # Resolve the custom package name into concrete (target, package) pairs.
  package_target_packages = GetPackageTargetPackages(
      arguments.fillemptytars_package,
      arguments.package_target_packages
  if not package_target_packages:
    raise error.Error('Unknown package: %s.' % arguments.fillemptytars_package
                      + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in package_target_packages:
    package_path = package_locations.GetLocalPackageFile(arguments.tar_dir,
    package_desc = package_info.PackageInfo(package_path, skip_missing=True)
    output_package_desc = package_info.PackageInfo()
    for archive in package_desc.GetArchiveList():
      # If archive does not exist, fill it with an empty one.
      archive_data = archive.GetArchiveData()
      if archive_data.hash:
        output_package_desc.AppendArchive(archive)
        logging.info('Filling missing archive: %s.', archive_data.name)
        # Pick the tar write mode from the archive's file extension.
        if (archive_data.name.endswith('.tar.gz') or
            archive_data.name.endswith('.tgz')):
        elif archive_data.name.endswith('.bz2'):
        elif archive_data.name.endswith('.tar'):
          raise error.Error('Unknown archive type: %s.' % archive_data.name)

        archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_file = cygtar.CygTar(archive_file, mode)

        # Describe the newly written empty tar so it replaces the missing one.
        tar_hash = archive_info.GetArchiveHash(archive_file)
        empty_archive = archive_info.ArchiveInfo(name=archive_data.name,
        output_package_desc.AppendArchive(empty_archive)
    output_package_desc.SavePackageFile(package_path)
def _RecalcRevisions(subparser):
  # The 'recalcrevisions' command takes no extra command-specific options.
  subparser.description = 'Recalculates hashes for files in revision directory.'
def _DoRecalcRevisions(arguments):
  # Reload every revision JSON without hash verification, then resave it so
  # the stored hashes are regenerated.
  for json_file in os.listdir(arguments.revisions_dir):
    if json_file.endswith('.json'):
      revision_file = os.path.join(arguments.revisions_dir, json_file)
      revision_desc = revision_info.RevisionInfo(arguments.packages_desc)
      revision_desc.LoadRevisionFile(revision_file, skip_hash_verify=True)
      revision_desc.SaveRevisionFile(revision_file)
# Each command pairs an argparse-setup function with an execution function.
CommandFuncs = collections.namedtuple(
    ['parse_func', 'do_cmd_func'])

    # Command-name -> CommandFuncs dispatch table.
    'list': CommandFuncs(_ListCmdArgParser, _DoListCmd),
    'archive': CommandFuncs(_ArchiveCmdArgParser, _DoArchiveCmd),
    'extract': CommandFuncs(_ExtractCmdArgParser, _DoExtractCmd),
    'upload': CommandFuncs(_UploadCmdArgParser, _DoUploadCmd),
    'sync': CommandFuncs(_SyncCmdArgParser, _DoSyncCmd),
    'setrevision': CommandFuncs(_SetRevisionCmdArgParser, _DoSetRevisionCmd),
    'getrevision': CommandFuncs(_GetRevisionCmdArgParser, _DoGetRevisionCmd),
    'fillemptytars': CommandFuncs(_FillEmptyTarsParser, _DoFillEmptyTarsCmd),
    'recalcrevisions': CommandFuncs(_RecalcRevisions, _DoRecalcRevisions),
  # Build the top-level parser: global flags first, then one subparser per
  # entry in COMMANDS.
  parser = argparse.ArgumentParser()
  host_platform = pynacl.platform.GetOS()
  host_arch = pynacl.platform.GetArch3264()
  # List out global options for all commands.
      '-v', '--verbose', dest='verbose',
      action='store_true', default=False,
      help='Verbose output')
      '-q', '--quiet', dest='quiet',
      action='store_true', default=False,
      '--platform', dest='host_platform',
      default=host_platform,
      help='Custom platform other than the current (%s).' % host_platform)
      '--arch', dest='host_arch',
      help='Custom architecture other than the current (%s).' % host_arch)
      '--package-targets', dest='package_targets',
      help='Custom package targets specifed as comma separated names. Defaults'
           ' to package targets defined for host platform and architecture inside'
           ' of the packages json file.')
      '--packages', dest='packages',
      help='Custom packages specified as comma separated package names. Custom'
           ' packages not defined by the packages json file must be prefixed by'
           ' the package_target directory (IE. $PACKAGE_TARGET/$PACKAGE).')
  parser.add_argument(
      '--append', metavar='PACKAGE', dest='append_packages',
      action='append', default=[],
      help='Append extra package to current list of packages.')
  parser.add_argument(
      '--exclude', metavar='PACKAGE', dest='exclude_packages',
      action='append', default=[],
      help='Exclude package from current list of packages.')
  parser.add_argument(
      '--packages-json', dest='packages_json',
      default=DEFAULT_PACKAGES_JSON, type=argparse.FileType('rt'),
      help='Packages description file.'
           ' [Default: %s]' % DEFAULT_PACKAGES_JSON)
  parser.add_argument(
      '--revisions-dir', dest='revisions_dir',
      default=DEFAULT_REVISIONS_DIR,
      help='Revisions directory where packages revisions will be found.')
  parser.add_argument(
      '--dest-dir', dest='dest_dir',
      default=DEFAULT_DEST_DIR,
      help='Destination directory where all the packages will be extracted to.')
  parser.add_argument(
      '--tar-dir', dest='tar_dir',
      help='Directory for package archive files. Defaults to "$DEST-DIR/.tars".')
  parser.add_argument(
      '--annotate', dest='annotate',
      action='store_true', default=False,
      help='Print out build bot annotations.')
  parser.add_argument(
      '--cloud-bucket', dest='cloud_bucket',
      default=DEFAULT_CLOUD_BUCKET,
      help='Google storage cloud bucket name.'
           ' [Default: %s]' % DEFAULT_CLOUD_BUCKET)
  # Add subparsers for all commands. These are flags for specific commands,
  # IE. [options] command [command-options]
  command_parser = parser.add_subparsers(title='command', dest='command')
  # NOTE(review): dict.iteritems() is Python 2 only; this would need items()
  # under Python 3.
  for command, cmd_funcs in COMMANDS.iteritems():
    sub_parser = command_parser.add_parser(command)
    cmd_funcs.parse_func(sub_parser)
  arguments = parser.parse_args(args)
  pynacl.log_tools.SetupLogging(
      verbose=arguments.verbose, quiet=arguments.quiet)
  # Default the tar directory to live beneath the destination directory.
  if arguments.tar_dir is None:
    arguments.tar_dir = os.path.join(arguments.dest_dir, '.tars')
  # Parse the package description up front and store it into the arguments
  # object. Almost all the commands need to use this information.
  packages_desc = packages_info.PackagesInfo(arguments.packages_json)
  arguments.packages_desc = packages_desc
  # Based on the host platform and host architecture, we can determine the set
  # of package targets used from the packages description. Minimize platform
  # and architecture errors by standardizing the names using pynacl.platform.
  if arguments.package_targets is None:
    package_targets = packages_desc.GetPackageTargets(
        pynacl.platform.GetOS(arguments.host_platform),
        pynacl.platform.GetArch3264(arguments.host_arch))
    package_targets = arguments.package_targets.split(',')
  # If the packages argument was not set, use the default list of packages
  # for each package target.
  packages_set = set()
  if arguments.packages is None:
    for package_target in package_targets:
      packages = packages_desc.GetPackages(package_target)
      if packages is None:
        raise error.Error('No packages defined for Package Target: %s.' %
        packages_set.update(packages)
    packages_set.update(arguments.packages.split(','))
  # Append/exclude any extra packages that were specified.
  packages_set.update(arguments.append_packages)
  packages_set.difference_update(arguments.exclude_packages)
  # Build a dictionary that organizes packages to their respective package
  # targets. Packages may exist in multiple package targets so we will have
  # to have the key be package and value be a list of package targets.
  package_targets_dict = collections.defaultdict(list)
  for package_target in package_targets:
    for package in packages_desc.GetPackages(package_target):
      package_targets_dict[package].append(package_target)
  # Use the list of packages to determine the set of package target packages
  # we are operating on, custom package targets will have the package target
  # inside of the name of the package name (see help for "--packages" argument).
  # The package_target_packages is a list of tuples (package_target, package),
  # for every package along with the associated package target.
  package_target_packages = []
  for package in sorted(packages_set):
    package_targets = package_targets_dict.get(package, None)
    if package_targets is None:
      custom_package_targets = GetPackageTargetPackages(package, [])
      if not custom_package_targets:
        raise error.Error('Invalid custom package: "%s".'
                          ' Expected $PACKAGE_TARGET/$PACKAGE' % package)
      package_target_packages.extend(custom_package_targets)
      for package_target in package_targets:
        package_target_packages.append((package_target, package))
  arguments.package_target_packages = package_target_packages
  # Create a GSD Storage object for those who need it.
  cloud_bucket = arguments.cloud_bucket
  gsd_store = pynacl.gsd_storage.GSDStorage(cloud_bucket, [cloud_bucket])
  arguments.gsd_store = gsd_store
    arguments = ParseArgs(args)
    return COMMANDS[arguments.command].do_cmd_func(arguments)
  except error.Error as e:
    # Tool-level failures are reported as a short message on stderr rather
    # than a traceback.
    sys.stderr.write('package_version: ' + str(e) + '\n')
if __name__ == '__main__':
  # Drop the script name; main() receives only the real arguments.
  sys.exit(main(sys.argv[1:]))