Upstream version 7.36.149.0
[platform/framework/web/crosswalk.git] / src / native_client / build / package_version / package_version.py
1 #!/usr/bin/python
2 # Copyright (c) 2014 The Native Client Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """This script handles all of the processing for versioning packages.
7
8 package_version.py manages all of the various operations done between
9 packages, including archiving, extracting, uploading, and downloading
10 packages. For a list of options and commands, see the help for the script.
11
12 Glossary:
13   Package: A list of archives, such as "nacl_x86_glibc" or "nacl_x86_newlib".
14   Package Archive: An archive (usually a tar file) that is part of a package.
15   Package Target: Package targets consists of packages. Each package target
16     has it's own version of a package. An example of a package target would
17     be something such as "win_x86_nacl_x86" or "mac_x86_nacl_x86". In that case,
18     "win_x86_nacl_x86" and "mac_x86_nacl_x86" would each have their own version
19     of "nacl_x86_glibc" and "nacl_x86_newlib" for windows and mac respectively.
20   Revision Number: The SVN revision number of a sanctioned version. This number
21     is used to synchronize packages to sanctioned versions.
22
23 JSON Files:
24   Packages File - A file which describes the various package targets for each
25     platform/architecture along with the packages associated with each package
26     target.
27     [Default file: build/package_version/standard_packages.json].
28   Package File - A file which contains the list of package archives within
29     a package.
30     [Default file: toolchain/.tars/$PACKAGE_TARGET/$PACKAGE.json]
31   Archive File - A file which describes an archive within a package. Each
32     archive description file will contain information about an archive such
33     as name, URL to download from, and hash.
34     [Default File: toolchain/.tars/$PACKAGE_TARGET/$PACKAGE/$ARCHIVE.json]
35   Revision File - A file which describes the sanctioned version of package
36     for each of the package targets associated with it.
37     [Default file: toolchain_revisions/$PACKAGE.json]
38 """
39
40 import argparse
41 import collections
42 import logging
43 import os
44 import shutil
45 import sys
46
47 sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
48 import cygtar
49
50 sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
51 import pynacl.file_tools
52 import pynacl.gsd_storage
53 import pynacl.log_tools
54 import pynacl.platform
55 import pynacl.working_directory
56
57 import archive_info
58 import package_info
59 import package_locations
60 import packages_info
61 import revision_info
62
# Directory layout anchors: this script lives in build/package_version/,
# so BUILD_DIR is the "build" directory and NACL_DIR is the NaCl root.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_DIR = os.path.dirname(CURRENT_DIR)
NACL_DIR = os.path.dirname(BUILD_DIR)

# Suffix for in-progress files; CleanTempFiles() removes anything ending in it.
TEMP_SUFFIX = '.tmp'

# Default locations for the JSON description files and extraction destination
# (see the module docstring for what each JSON file describes).
DEFAULT_PACKAGES_JSON = os.path.join(CURRENT_DIR, 'standard_packages.json')
DEFAULT_REVISIONS_DIR = os.path.join(NACL_DIR, 'toolchain_revisions')
DEFAULT_DEST_DIR = os.path.join(NACL_DIR, 'toolchain')
# Default remote storage bucket name handed to the storage layer.
DEFAULT_CLOUD_BUCKET = 'nativeclient-archive2'
73
74
75 def RemoveOldToolchainFiles(toolchain_dir):
76   """Temporary code to delete old toolchain files."""
77   if not os.path.isdir(toolchain_dir):
78     return
79
80   # Delete old tar files
81   tars_dir = os.path.join(toolchain_dir, '.tars')
82   if os.path.isdir(tars_dir):
83     for tar_item in os.listdir(tars_dir):
84       tar_path = os.path.join(tars_dir, tar_item)
85       if os.path.isfile(tar_path):
86         print 'Removing stale tar file:', tar_path
87         os.unlink(tar_path)
88
89   # Delete any top level directories that do not conform to $OS_$ARCH.
90   valid_top_level_dirs = set()
91   for os_name in pynacl.platform.OS_LIST:
92     for arch_name in pynacl.platform.ARCH_LIST:
93       valid_top_level_dirs.add('%s_%s' % (os_name, arch_name))
94
95   # Delete any files and directories that do not conform to the standard.
96   # Do not touch any system files that begin with '.', including ".tars".
97   top_level_dirs = []
98   for dir_item in os.listdir(toolchain_dir):
99     if dir_item.startswith('.'):
100       continue
101
102     full_path = os.path.join(toolchain_dir, dir_item)
103     if dir_item in valid_top_level_dirs:
104       top_level_dirs.append(full_path)
105       continue
106     print 'Removing stale toolchain item:', full_path
107     if os.path.isfile(full_path):
108       os.unlink(full_path)
109     else:
110       pynacl.file_tools.RemoveDir(full_path)
111
112   # Delete any second level dirs that do not conform one of the following:
113   #  1. It must be of the format "nacl_*".
114   #  2. It must be of the format "pnacl_*".
115   #  3. It must be of the format "*_trusted".
116   for top_level_dir in top_level_dirs:
117     for dir_item in os.listdir(top_level_dir):
118       if (dir_item.startswith('nacl_') or
119           dir_item.startswith('pnacl_') or
120           dir_item.endswith('_trusted')):
121         continue
122
123       full_path = os.path.join(top_level_dir, dir_item)
124       print 'Removing stale toolchain item:', full_path
125       if os.path.isfile(full_path):
126         os.unlink(full_path)
127       else:
128         pynacl.file_tools.RemoveDir(full_path)
129
130 #
131 # These are helper functions that help each command.
132 #
133
def CleanTempFiles(directory):
  """Cleans up all temporary files ending with TEMP_SUFFIX in a directory."""
  for root, _, file_names in os.walk(directory):
    # Delete only the entries carrying the temporary suffix; everything
    # else in the tree is left untouched.
    for file_name in file_names:
      if not file_name.endswith(TEMP_SUFFIX):
        continue
      os.unlink(os.path.join(root, file_name))
141
142
def GetPackageTargetPackages(custom_package_name, package_target_packages):
  """Returns a list of package target packages given a custom package name.

  A custom package name can either have a specified package target attached
  to it (IE. $PACKAGE_TARGET/PACKAGE_NAME) or be extracted out of a default
  list of package targets along with their packages.

  Args:
    custom_package_name: Package name with an optional package target.
    package_target_packages: List of tuples (package_target, package).
  Returns:
    List of package target tuples matching the package name.
  """
  # Normalize both separator styles to the local OS separator.
  normalized_path = custom_package_name.replace('\\', os.path.sep)
  normalized_path = normalized_path.replace('/', os.path.sep)
  if os.path.sep in normalized_path:
    # An explicit package target prefix was supplied; split it off and
    # return the single pairing directly.
    package_target, package_name = normalized_path.split(os.path.sep, 1)
    return [(package_target, package_name)]

  # No package target in the name; collect every default pairing whose
  # package matches the requested name.
  matches = []
  for package_target, package in package_target_packages:
    if package == custom_package_name:
      matches.append((package_target, package))
  return matches
170
171
def DownloadPackageArchives(tar_dir, package_target, package_name, package_desc,
                            downloader=None, revision_num=None):
  """Downloads package archives from the cloud to the tar directory.

  Args:
    tar_dir: Root tar directory where archives will be downloaded to.
    package_target: Package target of the package to download.
    package_name: Package name of the package to download.
    package_desc: package_info object of the package to download.
    downloader: function which takes a url and a file path for downloading.
    revision_num: Revision number being synced to; used only for logging.
  Returns:
    The list of files that were downloaded.
  Raises:
    IOError: When an archive has no URL, cannot be downloaded, or fails its
      hash check after download.
  """
  downloaded_files = []
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload
  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                             package_target,
                                                             package_name)
  # To ensure that we do not redownload extra archives that we already have,
  # create a dictionary of old package archives that contains the hash of each
  # package archive.
  old_archives = {}
  if os.path.isfile(local_package_file):
    try:
      old_package_desc = package_info.PackageInfo(local_package_file)
      old_archive_names = [archive.GetArchiveData().name
                           for archive
                           in old_package_desc.GetArchiveList()]
      for archive_name in old_archive_names:
        archive_file = package_locations.GetLocalPackageArchiveFile(
            tar_dir,
            package_target,
            package_name,
            archive_name
            )

        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash is not None:
          old_archives[archive_name] = archive_hash
    except Exception:
      # Fixed: was a bare "except:", which would also swallow
      # KeyboardInterrupt/SystemExit. Nothing can be trusted here anymore,
      # delete all package archives.
      logging.exception('Invalid local package file: %s', local_package_file)
      archive_directory = package_locations.GetLocalPackageArchiveDir(
          tar_dir,
          package_target,
          package_name
          )
      os.unlink(local_package_file)
      pynacl.file_tools.RemoveDir(archive_directory)

  # Download packages information file along with each of the package
  # archives described in the information file. Also keep track of what
  # new package names matches old package names. We will have to delete
  # stale package names after we are finished.
  update_archives = []
  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    old_hash = old_archives.get(archive_desc.name, None)
    if old_hash is not None:
      old_archives.pop(archive_desc.name)
      if archive_desc.hash == old_hash:
        logging.debug('Skipping matching archive: %s', archive_desc.name)
        continue
    update_archives.append(archive_obj)

  if update_archives:
    # Lazy %-style logging args: the message is only formatted when emitted.
    logging.info('--Syncing %s to revision %s--', package_name, revision_num)
    num_archives = len(update_archives)
    for index, archive_obj in enumerate(update_archives):
      archive_desc = archive_obj.GetArchiveData()
      local_archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name
      )
      pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)

      if archive_desc.url is None:
        raise IOError('Error, no URL for archive: %s' % archive_desc.name)

      logging.info('Downloading package archive: %s (%d/%d)',
                   archive_desc.name, index + 1, num_archives)
      try:
        downloader(archive_desc.url, local_archive_file)
      except Exception as e:
        raise IOError('Could not download URL (%s): %s' %
                      (archive_desc.url, e))

      # Never trust a download until its hash matches the package file.
      verified_hash = archive_info.GetArchiveHash(local_archive_file)
      if verified_hash != archive_desc.hash:
        raise IOError('Package hash check failed: %s != %s' %
                      (verified_hash, archive_desc.hash))

      downloaded_files.append(local_archive_file)

  # Delete any stale left over packages.
  for old_archive in old_archives:
    archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        old_archive)
    os.unlink(archive_file)

  # Save the package file so we know what we currently have.
  package_desc.SavePackageFile(local_package_file)

  return downloaded_files
282
283
def ArchivePackageArchives(tar_dir, package_target, package_name, archives,
                           extra_archives=None):
  """Archives local package archives to the tar directory.

  Args:
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    archives: List of archive file paths where archives currently live.
      Each entry may carry extra info: "TAR(,SRCDIR(:EXTRACTDIR))(@URL)".
    extra_archives: Extra archives that are expected to be built elsewhere.
  Returns:
    Returns the local package file that was archived.
  Raises:
    IOError: When a required (non-extra) archive file is missing.
  """
  # Fixed: "extra_archives=[]" was a mutable default argument, shared
  # across calls; use None as the sentinel instead.
  if extra_archives is None:
    extra_archives = []

  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                             package_target,
                                                             package_name)

  valid_archive_files = set()
  archive_list = []

  package_desc = package_info.PackageInfo()
  package_archives = ([(archive, False) for archive in archives] +
                      [(archive, True) for archive in extra_archives])
  for archive, skip_missing in package_archives:
    # Split off the optional "@URL" suffix.
    archive_url = None
    if '@' in archive:
      archive, archive_url = archive.split('@', 1)

    # Split off the optional ",SRCDIR(:EXTRACTDIR)" tar information.
    extract_param = ''
    tar_src_dir = ''
    extract_dir = ''
    if ',' in archive:
      archive, extract_param = archive.split(',', 1)
      if ':' in extract_param:
        tar_src_dir, extract_dir = extract_param.split(':', 1)
      else:
        tar_src_dir = extract_param

    archive_hash = archive_info.GetArchiveHash(archive)
    archive_name = os.path.basename(archive)
    archive_desc = archive_info.ArchiveInfo(archive_name,
                                            archive_hash,
                                            url=archive_url,
                                            tar_src_dir=tar_src_dir,
                                            extract_dir=extract_dir)
    package_desc.AppendArchive(archive_desc)

    if archive_hash is None:
      # A None hash means the archive file does not exist locally.
      if skip_missing:
        logging.info('Skipping archival of missing file: %s', archive)
        continue
      raise IOError('Invalid package: %s.' % archive)
    archive_list.append(archive)

    archive_basename = os.path.basename(archive)
    archive_json = archive_basename + '.json'
    valid_archive_files.update([archive_basename, archive_json])

  # Delete any stale archive files
  local_archive_dir = package_locations.GetLocalPackageArchiveDir(
      tar_dir,
      package_target,
      package_name)

  if os.path.isdir(local_archive_dir):
    for dir_item in os.listdir(local_archive_dir):
      if dir_item in valid_archive_files:
        continue

      item_path = os.path.join(local_archive_dir, dir_item)
      if os.path.isdir(item_path):
        pynacl.file_tools.RemoveDir(item_path)
      else:
        pynacl.file_tools.RemoveFile(item_path)

  # We do not need to archive the package if it already matches. But if the
  # local package file is invalid or does not match, then we should recreate
  # the json file.
  if os.path.isfile(local_package_file):
    try:
      current_package_desc = package_info.PackageInfo(local_package_file,
                                                      skip_missing=True)
      if current_package_desc == package_desc:
        # Fixed: previously returned None here, contradicting the documented
        # return value; return the existing package file for consistency.
        return local_package_file
    except ValueError:
      pass

  # Copy each of the packages over to the tar directory first.
  for archive_file in archive_list:
    archive_name = os.path.basename(archive_file)
    local_archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        archive_name)

    logging.info('Archiving file: %s', archive_file)
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)
    shutil.copyfile(archive_file, local_archive_file)

  # Once all the copying is completed, update the local packages file.
  logging.info('Package "%s" archived: %s', package_name, local_package_file)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(local_package_file)
  package_desc.SavePackageFile(local_package_file)

  return local_package_file
390
391
def UploadPackage(storage, revision, tar_dir, package_target, package_name,
                  is_shared_package, annotate=False, skip_missing=False,
                  custom_package_file=None):
  """Uploads a local package file to the supplied cloud storage object.

  By default local package files are expected to be found in the standardized
  location within the tar directory, however a custom package file may be
  specified to upload from a different location. Package archives that do not
  have their URL field set will automatically have the archives uploaded so
  that someone accessing the package file from the cloud storage will also
  have access to the package archives.

  Args:
    storage: Cloud storage object which supports PutFile and GetFile.
    revision: SVN Revision number the package should be associated with.
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    is_shared_package: Is this package shared among all package targets?
    annotate: Print annotations for build bots?
    skip_missing: Skip missing package archive files?
    custom_package_file: File location for a custom package file.
  Returns:
    Returns remote download key for the uploaded package file.
  Raises:
    IOError: When a local archive file is missing or its hash does not
      match the hash recorded in the package file.
  """
  if custom_package_file is not None:
    local_package_file = custom_package_file
  else:
    local_package_file = package_locations.GetLocalPackageFile(
        tar_dir,
        package_target,
        package_name)

  # Upload the package file and also upload any local package archives so
  # that they are downloadable.
  package_desc = package_info.PackageInfo(local_package_file,
                                          skip_missing=skip_missing)
  upload_package_desc = package_info.PackageInfo()

  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    url = archive_desc.url
    if archive_desc.hash and url is None:
      # Archive exists locally but has no URL yet: verify its hash against
      # the package file, then upload it so it becomes downloadable.
      if annotate:
        print '@@@BUILD_STEP Archive:%s (upload)@@@' % archive_desc.name

      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name)
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash is None:
        raise IOError('Missing Archive File: %s' % archive_file)
      elif archive_hash != archive_desc.hash:
        raise IOError(
            'Archive hash does not match package hash: %s' % archive_file
            + '\n  Archive Hash: %s' % archive_hash
            + '\n  Package Hash: %s' % archive_desc.hash)

      logging.warn('Missing archive URL: %s', archive_desc.name)
      logging.warn('Uploading archive to be publically available...')
      remote_archive_key = package_locations.GetRemotePackageArchiveKey(
          archive_desc.name,
          archive_desc.hash)
      url = storage.PutFile(archive_file, remote_archive_key, clobber=True)
      if annotate:
        print '@@@STEP_LINK@download@%s@@@' % url

    # Rebuild the archive description with the (possibly freshly assigned)
    # URL so the uploaded package file always references downloadable
    # archives.
    archive_desc = archive_info.ArchiveInfo(
        archive_desc.name,
        archive_desc.hash,
        url=url,
        tar_src_dir=archive_desc.tar_src_dir,
        extract_dir=archive_desc.extract_dir)
    upload_package_desc.AppendArchive(archive_desc)

  # Write the URL-complete package description to a sibling ".upload" file
  # and push that, leaving the local package file untouched.
  upload_package_file = local_package_file + '.upload'
  pynacl.file_tools.MakeParentDirectoryIfAbsent(upload_package_file)
  upload_package_desc.SavePackageFile(upload_package_file)

  logging.info('Uploading package information: %s', package_name)
  remote_package_key = package_locations.GetRemotePackageKey(
      is_shared_package,
      revision,
      package_target,
      package_name)
  package_info.UploadPackageInfoFiles(storage, package_target, package_name,
                                      remote_package_key, upload_package_file,
                                      skip_missing=skip_missing,
                                      annotate=annotate)

  return remote_package_key
485
486
def ExtractPackageTargets(package_target_packages, tar_dir, dest_dir,
                          downloader=None, skip_missing=False, quiet=False):
  """Extracts package targets from the tar directory to the destination.

  Each package archive within a package will be verified before being
  extracted. If a package archive does not exist or does not match the hash
  stored within the package file, it will be re-downloaded before being
  extracted.

  Args:
    package_target_packages: List of tuples of package target and package names.
    tar_dir: Source tar directory where package archives live.
    dest_dir: Root destination directory where packages will be extracted to.
    downloader: function which takes a url and a file path for downloading.
    skip_missing: Skip archives that are missing locally and have no URL.
    quiet: Suppress verbose output during tar extraction.
  Raises:
    IOError: When an archive is invalid or cannot be re-downloaded with a
      matching hash.
  """
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload

  for package_target, package_name in package_target_packages:
    package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                         package_target,
                                                         package_name)
    package_desc = package_info.PackageInfo(package_file,
                                            skip_missing=skip_missing)
    dest_package_dir = package_locations.GetFullDestDir(dest_dir,
                                                        package_target,
                                                        package_name)
    dest_package_file = package_locations.GetDestPackageFile(dest_dir,
                                                             package_target,
                                                             package_name)

    # Only do the extraction if the extract packages do not match.
    if os.path.isfile(dest_package_file):
      try:
        dest_package_desc = package_info.PackageInfo(dest_package_file)
        if dest_package_desc == package_desc:
          logging.debug('Skipping extraction for package (%s)', package_name)
          continue
      except Exception:
        # Fixed: was a bare "except:", which would also swallow
        # KeyboardInterrupt/SystemExit. Destination package file cannot be
        # trusted, if invalid re-extract.
        pass

      # Delete the old package file before we extract.
      os.unlink(dest_package_file)

    if os.path.isdir(dest_package_dir):
      logging.debug('Deleting old package directory: %s', dest_package_dir)
      pynacl.file_tools.RemoveDir(dest_package_dir)

    logging.info('Extracting package (%s) to directory: %s',
                 package_name, dest_package_dir)
    archive_list = package_desc.GetArchiveList()
    num_archives = len(archive_list)
    for index, archive_obj in enumerate(archive_list):
      archive_desc = archive_obj.GetArchiveData()
      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name
      )

      # Upon extraction, some files may not be downloaded (or have stale files),
      # we need to check the hash of each file and attempt to download it if
      # they do not match.
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash != archive_desc.hash:
        if archive_desc.url is None:
          if skip_missing:
            logging.info('Skipping extraction of missing archive: %s' %
                         archive_file)
            continue
          raise IOError('Invalid archive file and URL: %s' % archive_file)

        logging.warn('Expected archive missing, downloading: %s',
                     archive_desc.name)

        pynacl.file_tools.MakeParentDirectoryIfAbsent(archive_file)
        downloader(archive_desc.url, archive_file)
        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash != archive_desc.hash:
          raise IOError('Downloaded archive file does not match hash.'
                        ' [%s] Expected %s, received %s.' %
                        (archive_file, archive_desc.hash, archive_hash))

      destination_dir = os.path.join(dest_package_dir, archive_desc.extract_dir)
      logging.info('Extracting %s (%d/%d)' %
                   (archive_desc.name, index+1, num_archives))

      # Extract into a temporary directory first, then merge into place, so
      # a failed extraction never leaves a half-populated destination.
      temp_dir = os.path.join(destination_dir, '.tmp')
      pynacl.file_tools.RemoveDir(temp_dir)
      os.makedirs(temp_dir)
      tar_output = not quiet
      tar = cygtar.CygTar(archive_file, 'r:*', verbose=tar_output)
      curdir = os.getcwd()
      os.chdir(temp_dir)
      try:
        tar.Extract()
        tar.Close()
      finally:
        # Always restore the working directory, even on extraction failure.
        os.chdir(curdir)

      temp_src_dir = os.path.join(temp_dir, archive_desc.tar_src_dir)
      pynacl.file_tools.MoveAndMergeDirTree(temp_src_dir, destination_dir)
      pynacl.file_tools.RemoveDir(temp_dir)

    # Record what was extracted so future runs can skip matching packages.
    pynacl.file_tools.MakeParentDirectoryIfAbsent(dest_package_file)
    package_desc.SavePackageFile(dest_package_file)
595
596
597 #
598 # Each Command has 2 functions that describes it:
599 #   1. A parser function which specifies the extra command options each command
600 #   will have.
601 #   2. An execution function which is called when a user actually executes
602 #   the command.
603 #
def _ListCmdArgParser(subparser):
  """Registers command line options for the "list" command."""
  # The list command takes no extra options of its own.
  subparser.description = 'Lists package information.'
606
607
608 def _DoListCmd(arguments):
609   package_targets = collections.defaultdict(list)
610   for package_target, package in arguments.package_target_packages:
611     package_targets[package_target].append(package)
612
613   print 'Listing Package Targets and Packages:'
614   for package_target, packages in package_targets.iteritems():
615     print '\n%s:' % package_target
616     for package in sorted(packages):
617       print ' ', package
618
619
def _ArchiveCmdArgParser(subparser):
  """Registers command line options for the "archive" command."""
  subparser.description = 'Archive package archives to tar directory.'
  subparser.add_argument(
    '--archive-package', metavar='NAME', dest='archive__package',
    required=True,
    help='Package name archives will be packaged into.')
  subparser.add_argument(
    '--extra-archive', metavar='ARCHIVE', dest='archive__extra_archive',
    action='append', default=[],
    help='Extra archives that are expected to be built elsewhere.')
  # Positional arguments: one or more archive specifications, each optionally
  # carrying tar source/extract directories and a download URL.
  subparser.add_argument(
    metavar='TAR(,SRCDIR(:EXTRACTDIR))(@URL)', dest='archive__archives',
    nargs='+',
    help='Package archive with an optional tar information and url.'
         ' SRCDIR is the root directory where files live inside of the tar.'
         ' EXTRACTDIR is the directory to extract files to relative to the'
         ' destination directory. The URL is where the package can be'
         ' downloaded from.')
  subparser.add_argument(
    '-x', '--extract', dest='archive__extract',
    action='store_true', default=False,
    help='Extract package archives after they have been archived.')
642
643
def _DoArchiveCmd(arguments):
  """Archives the requested package and optionally extracts it afterwards."""
  matching_packages = GetPackageTargetPackages(
      arguments.archive__package,
      arguments.package_target_packages
  )
  if not matching_packages:
    # No package target pairing matched the requested package name.
    raise NameError('Unknown package: %s.' % arguments.archive__package
                    + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in matching_packages:
    ArchivePackageArchives(arguments.tar_dir,
                           package_target,
                           package_name,
                           arguments.archive__archives,
                           extra_archives=arguments.archive__extra_archive)

    if arguments.archive__extract:
      # Extract immediately after archiving when requested; missing extra
      # archives are tolerated here since they may be built elsewhere.
      ExtractPackageTargets([(package_target, package_name)],
                            arguments.tar_dir,
                            arguments.dest_dir,
                            skip_missing=True,
                            quiet=arguments.quiet)
666
667
def _ExtractCmdArgParser(subparser):
  """Registers command line options for the "extract" command."""
  # The extract command takes no extra options of its own.
  subparser.description = 'Extract packages from tar directory.'
670
671
def _DoExtractCmd(arguments):
  """Extracts every requested package into the destination directory."""
  ExtractPackageTargets(arguments.package_target_packages,
                        arguments.tar_dir,
                        arguments.dest_dir,
                        quiet=arguments.quiet)
678
679
def _UploadCmdArgParser(subparser):
  """Registers command line options for the "upload" command."""
  subparser.description = 'Upload a package file.'
  subparser.add_argument(
    '--upload-package', metavar='NAME', dest='upload__package', required=True,
    help='Package to upload.')
  subparser.add_argument(
    '--revision', metavar='NUM', dest='upload__revision', required=True,
    help='SVN Revision of the package to upload.')
  subparser.add_argument(
    '--package-file', metavar='FILE', dest='upload__file',
    default=None,
    help='Use custom package file instead of standard package file found'
         ' in the tar directory.')
  subparser.add_argument(
    '--skip-missing', dest='upload__skip_missing',
    action='store_true', default=False,
    help='Skip missing archive files when uploading package archives.')
697
698
def _DoUploadCmd(arguments):
  """Uploads the requested package to cloud storage."""
  matching_packages = GetPackageTargetPackages(
      arguments.upload__package,
      arguments.package_target_packages
  )
  if not matching_packages:
    # No package target pairing matched the requested package name.
    raise NameError('Unknown package: %s.' % arguments.upload__package
                    + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in matching_packages:
    is_shared = arguments.packages_desc.IsSharedPackage(package_name)
    UploadPackage(
        arguments.gsd_store,
        arguments.upload__revision,
        arguments.tar_dir,
        package_target,
        package_name,
        is_shared,
        annotate=arguments.annotate,
        skip_missing=arguments.upload__skip_missing,
        custom_package_file=arguments.upload__file
    )
720
721
def _SyncCmdArgParser(subparser):
  """Registers command line options for the "sync" command."""
  subparser.description = 'Download package archives to the tar directory.'
  subparser.add_argument(
    '--revision', metavar='NUM', dest='sync__revision',
    default=None,
    help='SVN Revision of the packages to download.')
  subparser.add_argument(
    '-x', '--extract', dest='sync__extract',
    action='store_true', default=False,
    # Fixed typo in user-visible help text: "arcvhies" -> "archives".
    help='Extract package archives after they have been downloaded.')
732
733
def _DoSyncCmd(arguments):
  """Syncs package archives to the tar directory, optionally extracting."""
  # TODO(dyen): remove this section eventually.
  # Before syncing, remove any old toolchain files temporarily.
  RemoveOldToolchainFiles(arguments.dest_dir)

  for package_target, package_name in arguments.package_target_packages:
    if arguments.sync__revision is None:
      # When the sync revision number is not specified, use the set
      # revision number found in the revision directory.
      revision_file = package_locations.GetRevisionFile(
          arguments.revisions_dir,
          package_name)
      revision_desc = revision_info.RevisionInfo(
          arguments.packages_desc,
          revision_file)
      package_desc = revision_desc.GetPackageInfo(package_target)
      revision_num = revision_desc.GetRevisionNumber()
    else:
      # When the sync revision number is specified, find the package to
      # download remotely using the revision.
      revision_num = arguments.sync__revision
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
          arguments.sync__revision,
          package_target,
          package_name)
      # Fetch the remote package description into a temporary working
      # directory that is cleaned up when the "with" block exits.
      with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
        temp_package_file = os.path.join(
            work_dir,
            os.path.basename(remote_package_key) + TEMP_SUFFIX)

        package_info.DownloadPackageInfoFiles(
            temp_package_file,
            remote_package_key,
            downloader=arguments.gsd_store.GetFile)

        package_desc = package_info.PackageInfo(temp_package_file)

    DownloadPackageArchives(
        arguments.tar_dir,
        package_target,
        package_name,
        package_desc,
        revision_num=revision_num)

  # Remove any leftover TEMP_SUFFIX files from the download step.
  CleanTempFiles(arguments.tar_dir)

  if arguments.sync__extract:
    ExtractPackageTargets(
        arguments.package_target_packages,
        arguments.tar_dir,
        arguments.dest_dir,
        quiet=arguments.quiet)
787
788
def _SetRevisionCmdArgParser(subparser):
  """Registers command-line flags for the "setrevision" command.

  Args:
    subparser: argparse sub-parser to populate.
  """
  subparser.description = 'Specify the revision of a package.'
  add_arg = subparser.add_argument
  add_arg('--revision-package', dest='setrevision__package', metavar='NAME',
          required=True,
          help='Package name to set revision of.')
  add_arg('--revision', dest='setrevision__revision', metavar='NUM',
          required=True, type=int,
          help='SVN Revision of the package to set.')
799
800
def _DoSetRevisionCmd(arguments):
  """Runs the "setrevision" command.

  Pins a package to an SVN revision by downloading the package description
  for that revision (for every package target the package belongs to) from
  remote storage and saving the combined result as a revision file in the
  revisions directory.
  """
  package_name = arguments.setrevision__package
  revision_num = arguments.setrevision__revision

  revision_desc = revision_info.RevisionInfo(arguments.packages_desc)
  revision_desc.SetRevisionNumber(revision_num)

  # Custom packages are named "$PACKAGE_TARGET/$PACKAGE" and embed their own
  # package target; otherwise look the targets up in the packages json file.
  custom_package_targets = GetPackageTargetPackages(package_name, [])
  if not custom_package_targets:
    package_targets = arguments.packages_desc.GetPackageTargetsForPackage(
        package_name)
  else:
    package_targets = [target[0] for target in custom_package_targets]
    first_target = custom_package_targets[0]
    package_name = first_target[1]

  with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
    for package_target in package_targets:
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
          revision_num,
          package_target,
          package_name)

      # Download the remote package description into a temporary file so it
      # can be parsed and recorded into the revision description.
      temp_package_file = os.path.join(
          work_dir,
          os.path.basename(remote_package_key) + TEMP_SUFFIX)

      package_info.DownloadPackageInfoFiles(
          temp_package_file,
          remote_package_key,
          downloader=arguments.gsd_store.GetFile)

      package_desc = package_info.PackageInfo(temp_package_file)

      logging.info('Setting %s:%s to revision %s',
                   package_target, package_name, revision_num)
      revision_desc.SetTargetRevision(
          package_name,
          package_target,
          package_desc)

  # Persist the fully-populated revision description.
  revision_file = package_locations.GetRevisionFile(
      arguments.revisions_dir,
      package_name)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(revision_file)
  revision_desc.SaveRevisionFile(revision_file)

  CleanTempFiles(arguments.revisions_dir)
850
851
def _GetRevisionCmdArgParser(subparser):
  """Registers command-line flags for the "getrevision" command.

  Args:
    subparser: argparse sub-parser to populate.
  """
  subparser.description = 'Get the revision of a package.'
  add_arg = subparser.add_argument
  add_arg('--revision-package', dest='getrevision__package', metavar='NAME',
          required=True,
          help='Package name to get revision of.')
858
859
860 def _DoGetRevisionCmd(arguments):
861   package_name = arguments.getrevision__package
862
863   custom_package_targets = GetPackageTargetPackages(package_name, [])
864   if custom_package_targets:
865     custom_target, package_name = custom_package_targets[0]
866
867   revision_file = package_locations.GetRevisionFile(arguments.revisions_dir,
868                                                     package_name)
869
870   if not os.path.isfile(revision_file):
871     raise NameError('No revision set for package: %s.' % package_name)
872
873   revision_desc = revision_info.RevisionInfo(arguments.packages_desc,
874                                              revision_file)
875   print revision_desc.GetRevisionNumber()
876
877
def _FillEmptyTarsParser(subparser):
  """Registers command-line flags for the "fillemptytars" command.

  Args:
    subparser: argparse sub-parser to populate.
  """
  subparser.description = 'Fill missing archives with empty ones in a package.'
  add_arg = subparser.add_argument
  add_arg('--fill-package', dest='fillemptytars_package', metavar='NAME',
          required=True,
          help='Package name to fill empty archives of.')
884
885
def _DoFillEmptyTarsCmd(arguments):
  """Runs the "fillemptytars" command.

  For each archive listed in a package file that has no hash (i.e. the
  archive file is missing), creates a valid empty tar archive of the
  matching compression type in its place and rewrites the package file so
  every entry has a real hash.

  Raises:
    NameError: If the named package is unknown, or an archive has an
      unrecognized file extension.
  """
  package_target_packages = GetPackageTargetPackages(
      arguments.fillemptytars_package,
      arguments.package_target_packages
  )
  if not package_target_packages:
    raise NameError('Unknown package: %s.' % arguments.fillemptytars_package
                    + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in package_target_packages:
    package_path = package_locations.GetLocalPackageFile(arguments.tar_dir,
                                                         package_target,
                                                         package_name)

    # skip_missing lets the package file load even though some of the
    # archives it lists are absent on disk.
    package_desc = package_info.PackageInfo(package_path, skip_missing=True)
    output_package_desc = package_info.PackageInfo()
    for archive in package_desc.GetArchiveList():
      # If archive does not exist, fill it with an empty one.
      archive_data = archive.GetArchiveData()
      if archive_data.hash:
        output_package_desc.AppendArchive(archive)
      else:
        logging.info('Filling missing archive: %s.', archive_data.name)
        # Choose the tarfile write mode matching the archive's extension.
        if (archive_data.name.endswith('.tar.gz') or
            archive_data.name.endswith('.tgz')):
          mode = 'w:gz'
        elif archive_data.name.endswith('.bz2'):
          mode = 'w:bz2'
        elif archive_data.name.endswith('.tar'):
          mode = 'w:'
        else:
          raise NameError('Unknown archive type: %s.' % archive_data.name)

        archive_file = package_locations.GetLocalPackageArchiveFile(
            arguments.tar_dir,
            package_target,
            package_name,
            archive_data.name
            )

        # Write an empty archive, then hash it so the regenerated package
        # file entry is consistent with the file on disk.
        tar_file = cygtar.CygTar(archive_file, mode)
        tar_file.Close()
        tar_hash = archive_info.GetArchiveHash(archive_file)

        empty_archive = archive_info.ArchiveInfo(name=archive_data.name,
                                                 archive_hash=tar_hash)
        output_package_desc.AppendArchive(empty_archive)

    output_package_desc.SavePackageFile(package_path)
935
936
# Pair of callables implementing one subcommand: parse_func registers the
# subcommand's flags on an argparse subparser, do_cmd_func executes it with
# the parsed arguments.
CommandFuncs = collections.namedtuple(
    'CommandFuncs',
    ['parse_func', 'do_cmd_func'])


# Maps each subcommand name to its CommandFuncs pair.
COMMANDS = {
    'list': CommandFuncs(_ListCmdArgParser, _DoListCmd),
    'archive': CommandFuncs(_ArchiveCmdArgParser, _DoArchiveCmd),
    'extract': CommandFuncs(_ExtractCmdArgParser, _DoExtractCmd),
    'upload': CommandFuncs(_UploadCmdArgParser, _DoUploadCmd),
    'sync': CommandFuncs(_SyncCmdArgParser, _DoSyncCmd),
    'setrevision': CommandFuncs(_SetRevisionCmdArgParser, _DoSetRevisionCmd),
    'getrevision': CommandFuncs(_GetRevisionCmdArgParser, _DoGetRevisionCmd),
    'fillemptytars': CommandFuncs(_FillEmptyTarsParser, _DoFillEmptyTarsCmd),
}
952
953
def ParseArgs(args):
  """Parses the command line and returns the processed arguments object.

  Besides the raw argparse values, the returned object is augmented with:
    tar_dir: defaulted to "$DEST_DIR/.tars" when not specified.
    packages_desc: PackagesInfo parsed from the packages json file.
    package_target_packages: list of (package_target, package) tuples the
      requested command should operate on.
    gsd_store: GSDStorage object for cloud uploads/downloads.

  Args:
    args: List of command-line argument strings (e.g. sys.argv[1:]).
  Returns:
    The augmented argparse namespace.
  Raises:
    NameError: If a package target has no packages defined, or a custom
      package is not of the form "$PACKAGE_TARGET/$PACKAGE".
  """
  parser = argparse.ArgumentParser()

  host_platform = pynacl.platform.GetOS()
  host_arch = pynacl.platform.GetArch3264()

  # List out global options for all commands.
  parser.add_argument(
    '-v', '--verbose', dest='verbose',
    action='store_true', default=False,
    help='Verbose output')
  parser.add_argument(
    '-q', '--quiet', dest='quiet',
    action='store_true', default=False,
    help='Quiet output')
  parser.add_argument(
    '--platform', dest='host_platform',
    default=host_platform,
    help='Custom platform other than the current (%s).' % host_platform)
  parser.add_argument(
    '--arch', dest='host_arch',
    default=host_arch,
    help='Custom architecture other than the current (%s).' % host_arch)
  parser.add_argument(
    '--package-targets', dest='package_targets',
    default=None,
    help='Custom package targets specifed as comma separated names. Defaults'
         ' to package targets defined for host platform and architecture inside'
         ' of the packages json file.')
  parser.add_argument(
    '--packages', dest='packages',
    default=None,
    help='Custom packages specified as comma separated package names. Custom'
         ' packages not defined by the packages json file must be prefixed by'
         ' the package_target directory (IE. $PACKAGE_TARGET/$PACKAGE).')
  parser.add_argument(
    '--append', metavar='PACKAGE', dest='append_packages',
    action='append', default=[],
    help='Append extra package to current list of packages.')
  parser.add_argument(
    '--exclude', metavar='PACKAGE', dest='exclude_packages',
    action='append', default=[],
    help='Exclude package from current list of packages.')
  parser.add_argument(
    '--packages-json', dest='packages_json',
    default=DEFAULT_PACKAGES_JSON, type=argparse.FileType('rt'),
    help='Packages description file.'
         ' [Default: %s]' % DEFAULT_PACKAGES_JSON)
  parser.add_argument(
    '--revisions-dir', dest='revisions_dir',
    default=DEFAULT_REVISIONS_DIR,
    help='Revisions directory where packages revisions will be found.')
  parser.add_argument(
    '--dest-dir', dest='dest_dir',
    default=DEFAULT_DEST_DIR,
    help='Destination directory where all the packages will be extracted to.')
  parser.add_argument(
    '--tar-dir', dest='tar_dir',
    default=None,
    help='Directory for package archive files. Defaults to "$DEST-DIR/.tars".')
  parser.add_argument(
    '--annotate', dest='annotate',
    action='store_true', default=False,
    help='Print out build bot annotations.')
  parser.add_argument(
    '--cloud-bucket', dest='cloud_bucket',
    default=DEFAULT_CLOUD_BUCKET,
    help='Google storage cloud bucket name.'
         ' [Default: %s]' % DEFAULT_CLOUD_BUCKET)

  # Add subparsers for all commands. These are flags for specific commands,
  # IE. [options] command [command-options]
  command_parser = parser.add_subparsers(title='command', dest='command')
  for command, cmd_funcs in COMMANDS.iteritems():
    sub_parser = command_parser.add_parser(command)
    cmd_funcs.parse_func(sub_parser)

  arguments = parser.parse_args(args)
  pynacl.log_tools.SetupLogging(arguments.verbose, quiet=arguments.quiet)
  if arguments.tar_dir is None:
    arguments.tar_dir = os.path.join(arguments.dest_dir, '.tars')

  # Parse the package description up front and store it into the arguments
  # object. Almost all the commands need to use this information.
  packages_desc = packages_info.PackagesInfo(arguments.packages_json)
  arguments.packages_desc = packages_desc

  # Based on the host platform and host architecture, we can determine the set
  # of package targets used from the packages description. Minimize platform
  # and architecture errors by standardizing the names using pynacl.platform.
  if arguments.package_targets is None:
    package_targets = packages_desc.GetPackageTargets(
        pynacl.platform.GetOS(arguments.host_platform),
        pynacl.platform.GetArch3264(arguments.host_arch))
  else:
    package_targets = arguments.package_targets.split(',')

  # If the packages argument were not set, use the default list of packages
  # for each package target.
  packages_set = set()
  if arguments.packages is None:
    for package_target in package_targets:
      packages = packages_desc.GetPackages(package_target)
      if packages is None:
        raise NameError('No packages defined for Package Target: %s.' %
                        package_target)
      packages_set.update(packages)
  else:
    packages_set.update(arguments.packages.split(','))

  # Append/exclude any extra packages that were specified.
  packages_set.update(arguments.append_packages)
  packages_set.difference_update(arguments.exclude_packages)

  # Build a dictionary that organizes packages to their respective package
  # targets. Packages may exist in multiple package targets so we will have
  # to have the key be package and value be a list of package targets.
  package_targets_dict = collections.defaultdict(list)
  for package_target in package_targets:
    # GetPackages() returns None for package targets the packages json file
    # does not know about (e.g. custom targets named on the command line when
    # "--packages" is also passed); treat that as an empty package list
    # instead of crashing with a TypeError.
    for package in packages_desc.GetPackages(package_target) or []:
      package_targets_dict[package].append(package_target)

  # Use the list of packages to determine the set of package target packages
  # we are operating on, custom package targets will have the package target
  # inside of the name of the package name (see help for "--packages" argument).
  # The package_target_packages is a list of tuples (package_target, package),
  # for every package along with the associated package target.
  package_target_packages = []
  for package in sorted(packages_set):
    targets_for_package = package_targets_dict.get(package, None)
    if targets_for_package is None:
      custom_package_targets = GetPackageTargetPackages(package, [])
      if not custom_package_targets:
        raise NameError('Invalid custom package: "%s".' % package
                        + ' Expected $PACKAGE_TARGET'
                        + os.path.sep
                        + '$PACKAGE')
      package_target_packages.extend(custom_package_targets)
    else:
      for package_target in targets_for_package:
        package_target_packages.append((package_target, package))

  arguments.package_target_packages = package_target_packages

  # Create a GSD Storage object for those who need it.
  cloud_bucket = arguments.cloud_bucket
  gsd_store = pynacl.gsd_storage.GSDStorage(cloud_bucket, [cloud_bucket])
  arguments.gsd_store = gsd_store

  return arguments
1104
1105
def main(args):
  """Entry point: parses |args| and dispatches to the chosen subcommand.

  Returns:
    The subcommand handler's return value (used as the process exit code).
  """
  arguments = ParseArgs(args)
  command = COMMANDS[arguments.command]
  return command.do_cmd_func(arguments)
1109
1110
if __name__ == '__main__':
  # Exit with the status code returned by the selected subcommand.
  sys.exit(main(sys.argv[1:]))