Upstream version 10.39.225.0
[platform/framework/web/crosswalk.git] / src / native_client / build / package_version / package_version.py
1 #!/usr/bin/python
2 # Copyright (c) 2014 The Native Client Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """This script handles all of the processing for versioning packages.
7
8 package_version.py manages all of the various operations done between
9 packages, including archiving, extracting, uploading, and downloading
10 packages. For a list of options and commands, see the help for the script.
11
12 Glossary:
13   Package: A list of archives, such as "nacl_x86_glibc" or "nacl_x86_newlib".
14   Package Archive: An archive (usually a tar file) that is part of a package.
15   Package Target: Package targets consists of packages. Each package target
16     has its own version of a package. An example of a package target would
17     be something such as "win_x86_nacl_x86" or "mac_x86_nacl_x86". In that case,
18     "win_x86_nacl_x86" and "mac_x86_nacl_x86" would each have their own version
19     of "nacl_x86_glibc" and "nacl_x86_newlib" for windows and mac respectively.
20   Revision Number: The SVN revision number of a sanctioned version. This number
21     is used to synchronize packages to sanctioned versions.
22
23 JSON Files:
24   Packages File - A file which describes the various package targets for each
25     platform/architecture along with the packages associated with each package
26     target.
27     [Default file: build/package_version/standard_packages.json].
28   Package File - A file which contains the list of package archives within
29     a package.
30     [Default file: toolchain/.tars/$PACKAGE_TARGET/$PACKAGE.json]
31   Archive File - A file which describes an archive within a package. Each
32     archive description file will contain information about an archive such
33     as name, URL to download from, and hash.
34     [Default File: toolchain/.tars/$PACKAGE_TARGET/$PACKAGE/$ARCHIVE.json]
35   Revision File - A file which describes the sanctioned version of package
36     for each of the package targets associated with it.
37     [Default file: toolchain_revisions/$PACKAGE.json]
38 """
39
40 import argparse
41 import collections
42 import logging
43 import os
44 import shutil
45 import sys
46
47 SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
48 sys.path.append(os.path.dirname(SCRIPT_DIR))
49 import cygtar
50
51 sys.path.append(os.path.dirname(os.path.dirname(SCRIPT_DIR)))
52 import pynacl.file_tools
53 import pynacl.gsd_storage
54 import pynacl.log_tools
55 import pynacl.platform
56 import pynacl.working_directory
57
58 import archive_info
59 import error
60 import package_info
61 import package_locations
62 import packages_info
63 import revision_info
64
65 CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
66 BUILD_DIR = os.path.dirname(CURRENT_DIR)
67 NACL_DIR = os.path.dirname(BUILD_DIR)
68
69 TEMP_SUFFIX = '.tmp'
70
71 DEFAULT_PACKAGES_JSON = os.path.join(CURRENT_DIR, 'standard_packages.json')
72 DEFAULT_REVISIONS_DIR = os.path.join(NACL_DIR, 'toolchain_revisions')
73 DEFAULT_DEST_DIR = os.path.join(NACL_DIR, 'toolchain')
74 DEFAULT_CLOUD_BUCKET = 'nativeclient-archive2'
75
76
77 #
78 # These are helper functions that help each command.
79 #
80
def CleanTempFiles(directory):
  """Recursively removes temporary files (ending with TEMP_SUFFIX).

  Args:
    directory: Root directory to walk for leftover temporary files.
  """
  for root, _, file_list in os.walk(directory):
    stale = (name for name in file_list if name.endswith(TEMP_SUFFIX))
    for stale_name in stale:
      os.unlink(os.path.join(root, stale_name))
88
89
def GetPackageTargetPackages(custom_package_name, package_target_packages):
  """Returns a list of package target packages given a custom package name.

  A custom package name can either have a specified package target attached
  to it (IE. $PACKAGE_TARGET/PACKAGE_NAME) or be extracted out of a default
  list of package targets along with their packages.

  Args:
    custom_package_name: Package name with an optional package target.
    package_target_packages: List of tuples (package_target, package).
  Returns:
    List of package target tuples matching the package name.
  """
  # Normalize both separator styles to the native one before testing.
  normalized = custom_package_name.replace('\\', os.path.sep)
  normalized = normalized.replace('/', os.path.sep)
  if os.path.sep in normalized:
    # An explicit package target is embedded in the name; split it out.
    target, name = normalized.split(os.path.sep, 1)
    return [(target, name)]

  # No package target given - collect every default entry whose package
  # name matches.
  matches = []
  for package_target, package in package_target_packages:
    if package == custom_package_name:
      matches.append((package_target, package))
  return matches
117
118
def DownloadPackageArchives(tar_dir, package_target, package_name, package_desc,
                            downloader=None, revision_num=None,
                            include_logs=False):
  """Downloads package archives from the cloud to the tar directory.

  Archives whose name and hash already match what is on disk are skipped.
  After downloading, stale archives (and their log files) that are no longer
  part of the package are deleted and the local package file is rewritten.

  Args:
    tar_dir: Root tar directory where archives will be downloaded to.
    package_target: Package target of the package to download.
    package_name: Package name of the package to download.
    package_desc: package_info object of the package to download.
    downloader: function which takes a url and a file path for downloading.
    revision_num: Revision being synced to (used for logging only).
    include_logs: If True, also download archive log files when available.
  Returns:
    The list of archive files that were downloaded.
  Raises:
    error.Error: If an archive has no URL, a download fails, or a downloaded
      archive does not match its expected hash.
  """
  downloaded_files = []
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload
  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                             package_target,
                                                             package_name)
  # To ensure that we do not redownload extra archives that we already have,
  # create a dictionary of old package archives that contains the hash of each
  # package archive.
  old_archives = {}
  if os.path.isfile(local_package_file):
    try:
      old_package_desc = package_info.PackageInfo(local_package_file)
      old_archives_list = old_package_desc.GetArchiveList()
      old_archive_names = [archive.GetArchiveData().name
                           for archive
                           in old_archives_list]
      for archive_name in old_archive_names:
        archive_file = package_locations.GetLocalPackageArchiveFile(
            tar_dir,
            package_target,
            package_name,
            archive_name
            )

        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash is not None:
          old_archives[archive_name] = archive_hash
    except Exception:
      # Fix: was a bare "except:", which would also swallow SystemExit and
      # KeyboardInterrupt. Any failure reading the old package state means
      # nothing can be trusted here anymore, delete all package archives.
      archive_directory = package_locations.GetLocalPackageArchiveDir(
          tar_dir,
          package_target,
          package_name
          )
      os.unlink(local_package_file)
      pynacl.file_tools.RemoveDir(archive_directory)

  # Download packages information file along with each of the package
  # archives described in the information file. Also keep track of what
  # new package names matches old package names. We will have to delete
  # stale package names after we are finished.
  update_archives = []
  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    old_hash = old_archives.get(archive_desc.name, None)
    if old_hash is not None:
      old_archives.pop(archive_desc.name)
      if archive_desc.hash == old_hash:
        logging.debug('Skipping matching archive: %s', archive_desc.name)
        continue
    update_archives.append(archive_obj)

  if update_archives:
    # Lazy logging args instead of eager "%" formatting.
    logging.info('--Syncing %s to revision %s--', package_name, revision_num)
    num_archives = len(update_archives)
    for index, archive_obj in enumerate(update_archives):
      archive_desc = archive_obj.GetArchiveData()
      local_archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name
      )
      pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)

      if archive_desc.url is None:
        raise error.Error('Error, no URL for archive: %s' % archive_desc.name)

      logging.info('Downloading package archive: %s (%d/%d)',
                   archive_desc.name, index+1, num_archives)
      try:
        downloader(archive_desc.url, local_archive_file)
      except Exception as e:
        raise error.Error('Could not download URL (%s): %s' %
                          (archive_desc.url, e))

      # Delete any stale log files so an old log is never paired with a
      # freshly downloaded archive.
      local_archive_log = package_locations.GetLocalPackageArchiveLogFile(
          local_archive_file
      )
      if os.path.isfile(local_archive_log):
        os.unlink(local_archive_log)

      # Verify the downloaded archive before trusting it.
      verified_hash = archive_info.GetArchiveHash(local_archive_file)
      if verified_hash != archive_desc.hash:
        raise error.Error('Package hash check failed: %s != %s' %
                          (verified_hash, archive_desc.hash))

      downloaded_files.append(local_archive_file)

  # Download any logs if include_logs is True.
  if include_logs:
    download_logs = []
    for archive_obj in package_desc.GetArchiveList():
      archive_desc = archive_obj.GetArchiveData()
      if archive_desc.log_url:
        local_archive_file = package_locations.GetLocalPackageArchiveFile(
            tar_dir,
            package_target,
            package_name,
            archive_desc.name
        )
        local_archive_log = package_locations.GetLocalPackageArchiveLogFile(
            local_archive_file
        )
        if not os.path.isfile(local_archive_log):
          download_log_tuple = (archive_desc.name,
                                archive_desc.log_url,
                                local_archive_log)
          download_logs.append(download_log_tuple)

    if download_logs:
      logging.info('--Syncing %s Logs--', package_name)
      num_logs = len(download_logs)
      for index, download_log_tuple in enumerate(download_logs):
        name, log_url, local_log_file = download_log_tuple
        logging.info('Downloading archive log: %s (%d/%d)',
                     name, index+1, num_logs)

        try:
          downloader(log_url, local_log_file)
        except Exception as e:
          # NOTE(review): raises IOError while every other failure path here
          # raises error.Error; kept as-is in case callers catch IOError.
          raise IOError('Could not download log URL (%s): %s' %
                        (log_url, e))

  # Delete any stale left over packages.
  for old_archive in old_archives:
    archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        old_archive)
    os.unlink(archive_file)

    archive_log = package_locations.GetLocalPackageArchiveLogFile(archive_file)
    if os.path.isfile(archive_log):
      os.unlink(archive_log)

  # Save the package file so we know what we currently have.
  if update_archives or old_archives:
    package_desc.SavePackageFile(local_package_file)

  return downloaded_files
277
278
def ArchivePackageArchives(tar_dir, package_target, package_name, archives,
                           extra_archives=None):
  """Archives local package archives to the tar directory.

  Args:
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    archives: List of archive file paths where archives currently live.
    extra_archives: Extra archives that are expected to be built elsewhere.
  Returns:
    The local package file that was archived, or None when the existing
    local package file already matches and nothing needed to be archived.
  Raises:
    error.Error: If a non-extra archive file is missing or unreadable.
  """
  if extra_archives is None:
    # Fix: the previous default of [] was a mutable default argument shared
    # across calls; substitute a fresh empty list here instead.
    extra_archives = []

  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                             package_target,
                                                             package_name)

  valid_archive_files = set()
  archive_list = []

  package_desc = package_info.PackageInfo()
  # Extra archives are allowed to be missing (skip_missing=True).
  package_archives = ([(archive, False) for archive in archives] +
                      [(archive, True) for archive in extra_archives])
  for archive, skip_missing in package_archives:
    archive_url = None
    archive_log_url = None
    # An archive spec may carry an "@URL" suffix, optionally with ",LOGURL".
    if '@' in archive:
      archive, archive_url = archive.split('@', 1)
      if ',' in archive_url:
        archive_url, archive_log_url = archive_url.split(',', 1)

    extract_param = ''
    tar_src_dir = ''
    extract_dir = ''
    # The file part may carry ",SRCDIR" or ",SRCDIR:EXTRACTDIR" tar info.
    if ',' in archive:
      archive, extract_param = archive.split(',', 1)
      if ':' in extract_param:
        tar_src_dir, extract_dir = extract_param.split(':', 1)
      else:
        tar_src_dir = extract_param

    archive_hash = archive_info.GetArchiveHash(archive)
    archive_name = os.path.basename(archive)
    archive_desc = archive_info.ArchiveInfo(name=archive_name,
                                            hash=archive_hash,
                                            url=archive_url,
                                            tar_src_dir=tar_src_dir,
                                            extract_dir=extract_dir,
                                            log_url=archive_log_url)
    package_desc.AppendArchive(archive_desc)

    if archive_hash is None:
      if skip_missing:
        logging.info('Skipping archival of missing file: %s', archive)
        continue
      raise error.Error('Invalid package: %s.' % archive)
    archive_list.append(archive)

    # Reuse archive_name rather than recomputing the basename.
    archive_json = archive_name + '.json'
    valid_archive_files.update([archive_name, archive_json])

  # Delete any stale archive files
  local_archive_dir = package_locations.GetLocalPackageArchiveDir(
      tar_dir,
      package_target,
      package_name)

  if os.path.isdir(local_archive_dir):
    for dir_item in os.listdir(local_archive_dir):
      if dir_item in valid_archive_files:
        continue

      item_path = os.path.join(local_archive_dir, dir_item)
      if os.path.isdir(item_path):
        pynacl.file_tools.RemoveDir(item_path)
      else:
        pynacl.file_tools.RemoveFile(item_path)

  # We do not need to archive the package if it already matches. But if the
  # local package file is invalid or does not match, then we should recreate
  # the json file.
  if os.path.isfile(local_package_file):
    try:
      current_package_desc = package_info.PackageInfo(local_package_file,
                                                      skip_missing=True)
      if current_package_desc == package_desc:
        return
    except ValueError:
      pass

  # Copy each of the packages over to the tar directory first.
  for archive_file in archive_list:
    archive_name = os.path.basename(archive_file)
    local_archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        archive_name)

    logging.info('Archiving file: %s', archive_file)
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)
    shutil.copyfile(archive_file, local_archive_file)

  # Once all the copying is completed, update the local packages file.
  logging.info('Package "%s" archived: %s', package_name, local_package_file)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(local_package_file)
  package_desc.SavePackageFile(local_package_file)

  return local_package_file
389
390
def UploadPackage(storage, revision, tar_dir, package_target, package_name,
                  is_shared_package, annotate=False, skip_missing=False,
                  custom_package_file=None):
  """Uploads a local package file to the supplied cloud storage object.

  By default local package files are expected to be found in the standardized
  location within the tar directory, however a custom package file may be
  specified to upload from a different location. Package archives that do not
  have their URL field set will automatically have the archives uploaded so that
  someone accessing the package file from the cloud storage will also have
  access to the package archives.

  Args:
    storage: Cloud storage object which supports PutFile and GetFile.
    revision: SVN Revision number the package should be associated with.
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    is_shared_package: Is this package shared among all package targets?
    annotate: Print annotations for build bots?
    skip_missing: Skip missing package archive files?
    custom_package_file: File location for a custom package file.
  Returns:
    Returns remote download key for the uploaded package file.
  Raises:
    error.Error: If a local archive file is missing or does not match the
      hash recorded in the package file.
  """
  if custom_package_file is not None:
    local_package_file = custom_package_file
  else:
    local_package_file = package_locations.GetLocalPackageFile(
        tar_dir,
        package_target,
        package_name)

  # Upload the package file and also upload any local package archives so
  # that they are downloadable.
  package_desc = package_info.PackageInfo(local_package_file,
                                          skip_missing=skip_missing)
  upload_package_desc = package_info.PackageInfo()

  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    url = archive_desc.url
    # An archive with a hash but no URL has never been uploaded; push it now.
    if archive_desc.hash and url is None:
      if annotate:
        print '@@@BUILD_STEP Archive:%s (upload)@@@' % archive_desc.name

      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name)
      # Verify the local archive against the package file before uploading.
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash is None:
        raise error.Error('Missing Archive File: %s' % archive_file)
      elif archive_hash != archive_desc.hash:
        raise error.Error(
            'Archive hash does not match package hash: %s' % archive_file
            + '\n  Archive Hash: %s' % archive_hash
            + '\n  Package Hash: %s' % archive_desc.hash)

      logging.warn('Missing archive URL: %s', archive_desc.name)
      logging.warn('Uploading archive to be publically available...')
      remote_archive_key = package_locations.GetRemotePackageArchiveKey(
          archive_desc.name,
          archive_desc.hash)
      url = storage.PutFile(archive_file, remote_archive_key, clobber=True)
      if annotate:
        print '@@@STEP_LINK@download@%s@@@' % url

    # Record the (possibly freshly assigned) URL in the upload description.
    updated_archive_obj = archive_obj.Copy(url=url)
    upload_package_desc.AppendArchive(updated_archive_obj)

  # Write the URL-annotated package description to a side file for upload.
  upload_package_file = local_package_file + '.upload'
  pynacl.file_tools.MakeParentDirectoryIfAbsent(upload_package_file)
  upload_package_desc.SavePackageFile(upload_package_file)

  logging.info('Uploading package information: %s', package_name)
  remote_package_key = package_locations.GetRemotePackageKey(
      is_shared_package,
      revision,
      package_target,
      package_name)
  package_info.UploadPackageInfoFiles(storage, package_target, package_name,
                                      remote_package_key, upload_package_file,
                                      skip_missing=skip_missing,
                                      annotate=annotate)

  return remote_package_key
479
480
def ExtractPackageTargets(package_target_packages, tar_dir, dest_dir,
                          downloader=None, skip_missing=False, quiet=False):
  """Extracts package targets from the tar directory to the destination.

  Each package archive within a package will be verified before being
  extracted. If a package archive does not exist or does not match the hash
  stored within the package file, it will be re-downloaded before being
  extracted.

  Args:
    package_target_packages: List of tuples of package target and package names.
    tar_dir: Source tar directory where package archives live.
    dest_dir: Root destination directory where packages will be extracted to.
    downloader: function which takes a url and a file path for downloading.
    skip_missing: If True, archives with no local file and no URL are skipped
      instead of raising an error.
    quiet: If True, suppress per-file tar extraction output.
  Raises:
    error.Error: If an archive is invalid with no URL, or a downloaded
      archive does not match its expected hash.
  """
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload

  for package_target, package_name in package_target_packages:
    package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                         package_target,
                                                         package_name)
    package_desc = package_info.PackageInfo(package_file,
                                            skip_missing=skip_missing)
    dest_package_dir = package_locations.GetFullDestDir(dest_dir,
                                                        package_target,
                                                        package_name)
    dest_package_file = package_locations.GetDestPackageFile(dest_dir,
                                                             package_target,
                                                             package_name)

    # Only do the extraction if the extract packages do not match.
    if os.path.isfile(dest_package_file):
      try:
        dest_package_desc = package_info.PackageInfo(dest_package_file)
        if dest_package_desc == package_desc:
          logging.debug('Skipping extraction for package (%s)', package_name)
          continue
      except Exception:
        # Fix: was a bare "except:", which would also swallow SystemExit and
        # KeyboardInterrupt. Destination package file cannot be trusted, if
        # invalid re-extract.
        pass

      # Delete the old package file before we extract.
      os.unlink(dest_package_file)

    if os.path.isdir(dest_package_dir):
      logging.debug('Deleting old package directory: %s', dest_package_dir)
      pynacl.file_tools.RemoveDir(dest_package_dir)

    logging.info('Extracting package (%s) to directory: %s',
                 package_name, dest_package_dir)
    archive_list = package_desc.GetArchiveList()
    num_archives = len(archive_list)
    for index, archive_obj in enumerate(archive_list):
      archive_desc = archive_obj.GetArchiveData()
      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name
      )

      # Upon extraction, some files may not be downloaded (or have stale files),
      # we need to check the hash of each file and attempt to download it if
      # they do not match.
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash != archive_desc.hash:
        if archive_desc.url is None:
          if skip_missing:
            logging.info('Skipping extraction of missing archive: %s',
                         archive_file)
            continue
          raise error.Error('Invalid archive file and URL: %s' % archive_file)

        logging.warn('Expected archive missing, downloading: %s',
                     archive_desc.name)

        pynacl.file_tools.MakeParentDirectoryIfAbsent(archive_file)
        downloader(archive_desc.url, archive_file)
        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash != archive_desc.hash:
          raise error.Error('Downloaded archive file does not match hash.'
                      ' [%s] Expected %s, received %s.' %
                      (archive_file, archive_desc.hash, archive_hash))

      destination_dir = os.path.join(dest_package_dir, archive_desc.extract_dir)
      logging.info('Extracting %s (%d/%d)',
                   archive_desc.name, index+1, num_archives)

      # Extract into a temporary directory first, then merge into the
      # destination, so a failed extraction does not leave a partial tree.
      temp_dir = os.path.join(destination_dir, '.tmp')
      pynacl.file_tools.RemoveDir(temp_dir)
      os.makedirs(temp_dir)
      tar_output = not quiet
      tar = cygtar.CygTar(archive_file, 'r:*', verbose=tar_output)
      curdir = os.getcwd()
      os.chdir(temp_dir)
      try:
        tar.Extract()
        tar.Close()
      finally:
        # Always restore the working directory, even if extraction fails.
        os.chdir(curdir)

      temp_src_dir = os.path.join(temp_dir, archive_desc.tar_src_dir)
      pynacl.file_tools.MoveAndMergeDirTree(temp_src_dir, destination_dir)
      pynacl.file_tools.RemoveDir(temp_dir)

    pynacl.file_tools.MakeParentDirectoryIfAbsent(dest_package_file)
    package_desc.SavePackageFile(dest_package_file)
589
590
591 #
592 # Each Command has 2 functions that describes it:
593 #   1. A parser function which specifies the extra command options each command
594 #   will have.
595 #   2. An execution function which is called when a user actually executes
596 #   the command.
597 #
598 def _ListCmdArgParser(subparser):
599   subparser.description = 'Lists package information.'
600
601
def _DoListCmd(arguments):
  """Prints each package target followed by its sorted list of packages.

  Args:
    arguments: Parsed command-line arguments; reads
      arguments.package_target_packages, a list of
      (package_target, package) tuples.
  """
  package_targets = collections.defaultdict(list)
  # Group package names under their owning package target.
  for package_target, package in arguments.package_target_packages:
    package_targets[package_target].append(package)

  print 'Listing Package Targets and Packages:'
  for package_target, packages in package_targets.iteritems():
    print '\n%s:' % package_target
    for package in sorted(packages):
      print ' ', package
612
613
614 def _ArchiveCmdArgParser(subparser):
615   subparser.description = 'Archive package archives to tar directory.'
616   subparser.add_argument(
617     '--archive-package', metavar='NAME', dest='archive__package',
618     required=True,
619     help='Package name archives will be packaged into.')
620   subparser.add_argument(
621     '--extra-archive', metavar='ARCHIVE', dest='archive__extra_archive',
622     action='append', default=[],
623     help='Extra archives that are expected to be built elsewhere.')
624   subparser.add_argument(
625     metavar='TAR(,SRCDIR(:EXTRACTDIR))(@URL,LOGURL)', dest='archive__archives',
626     nargs='+',
627     help='Package archive with an optional tar information and url.'
628          ' SRCDIR is the root directory where files live inside of the tar.'
629          ' EXTRACTDIR is the directory to extract files to relative to the'
630          ' destination directory. The URL is where the package can be'
631          ' downloaded from.')
632   subparser.add_argument(
633     '-x', '--extract', dest='archive__extract',
634     action='store_true', default=False,
635     help='Extract package archives after they have been archived.')
636
637
def _DoArchiveCmd(arguments):
  """Executes the 'archive' command, optionally extracting afterwards."""
  matched_packages = GetPackageTargetPackages(
      arguments.archive__package,
      arguments.package_target_packages
  )
  if not matched_packages:
    raise error.Error('Unknown package: %s.' % arguments.archive__package
                + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in matched_packages:
    ArchivePackageArchives(arguments.tar_dir,
                           package_target,
                           package_name,
                           arguments.archive__archives,
                           extra_archives=arguments.archive__extra_archive)

    # Immediately extract the freshly archived package when requested.
    if arguments.archive__extract:
      ExtractPackageTargets([(package_target, package_name)],
                            arguments.tar_dir,
                            arguments.dest_dir,
                            skip_missing=True,
                            quiet=arguments.quiet)
660
661
662 def _ExtractCmdArgParser(subparser):
663   subparser.description = 'Extract packages from tar directory.'
664   subparser.add_argument(
665     '--skip-missing', dest='extract__skip_missing',
666     action='store_true', default=False,
667     help='Skip missing archive files when extracting rather than erroring out.')
668
669
def _DoExtractCmd(arguments):
  """Executes the 'extract' command: unpacks packages to the destination."""
  targets = arguments.package_target_packages
  ExtractPackageTargets(targets,
                        arguments.tar_dir,
                        arguments.dest_dir,
                        skip_missing=arguments.extract__skip_missing,
                        quiet=arguments.quiet)
677
678
679 def _UploadCmdArgParser(subparser):
680   subparser.description = 'Upload a package file.'
681   subparser.add_argument(
682     '--upload-package', metavar='NAME', dest='upload__package', required=True,
683     help='Package to upload.')
684   subparser.add_argument(
685     '--revision', metavar='NUM', dest='upload__revision', required=True,
686     help='SVN Revision of the package to upload.')
687   subparser.add_argument(
688     '--package-file', metavar='FILE', dest='upload__file',
689     default=None,
690     help='Use custom package file instead of standard package file found'
691          ' in the tar directory.')
692   subparser.add_argument(
693     '--skip-missing', dest='upload__skip_missing',
694     action='store_true', default=False,
695     help='Skip missing archive files when uploading package archives.')
696
697
def _DoUploadCmd(arguments):
  """Executes the 'upload' command for every matching package target."""
  matched_packages = GetPackageTargetPackages(
      arguments.upload__package,
      arguments.package_target_packages
  )
  if not matched_packages:
    raise error.Error('Unknown package: %s.' % arguments.upload__package
                + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in matched_packages:
    # Shared packages are uploaded under a target-independent key.
    is_shared = arguments.packages_desc.IsSharedPackage(package_name)
    UploadPackage(
        arguments.gsd_store,
        arguments.upload__revision,
        arguments.tar_dir,
        package_target,
        package_name,
        is_shared,
        annotate=arguments.annotate,
        skip_missing=arguments.upload__skip_missing,
        custom_package_file=arguments.upload__file
    )
719
720
721 def _SyncCmdArgParser(subparser):
722   subparser.description = 'Download package archives to the tar directory.'
723   subparser.add_argument(
724     '--revision', metavar='NUM', dest='sync__revision',
725     default=None,
726     help='SVN Revision of the packages to download.')
727   subparser.add_argument(
728     '--include-logs', dest='sync__include_logs',
729     action='store_true', default=False,
730     help='Also download logs next to each archive if available.')
731   subparser.add_argument(
732     '-x', '--extract', dest='sync__extract',
733     action='store_true', default=False,
734     help='Extract package archives after they have been downloaded.')
735
736
def _DoSyncCmd(arguments):
  """Downloads package archives into the tar directory for each package.

  For every (package_target, package) pair: when --revision was not given,
  the pinned revision is read from the package's revision file in the
  revisions directory; otherwise the package description for the requested
  revision is downloaded from cloud storage into a temporary directory.
  After all downloads, leftover temp files in the tar directory are cleaned
  up and, when --extract was given, the archives are extracted into the
  destination directory.
  """
  for package_target, package_name in arguments.package_target_packages:
    if arguments.sync__revision is None:
      # When the sync revision number is not specified, use the set
      # revision number found in the revision directory.
      revision_file = package_locations.GetRevisionFile(
          arguments.revisions_dir,
          package_name)
      revision_desc = revision_info.RevisionInfo(
          arguments.packages_desc,
          revision_file)
      package_desc = revision_desc.GetPackageInfo(package_target)
      revision_num = revision_desc.GetRevisionNumber()
    else:
      # When the sync revision number is specified, find the package to
      # download remotely using the revision.
      revision_num = arguments.sync__revision
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
          arguments.sync__revision,
          package_target,
          package_name)
      # Download the package description into a scratch directory that is
      # deleted when the "with" block exits.
      with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
        temp_package_file = os.path.join(
            work_dir,
            os.path.basename(remote_package_key) + TEMP_SUFFIX)

        package_info.DownloadPackageInfoFiles(
            temp_package_file,
            remote_package_key,
            downloader=arguments.gsd_store.GetFile)

        package_desc = package_info.PackageInfo(temp_package_file)

    DownloadPackageArchives(
        arguments.tar_dir,
        package_target,
        package_name,
        package_desc,
        revision_num=revision_num,
        include_logs=arguments.sync__include_logs)

  CleanTempFiles(arguments.tar_dir)

  if arguments.sync__extract:
    ExtractPackageTargets(
        arguments.package_target_packages,
        arguments.tar_dir,
        arguments.dest_dir,
        quiet=arguments.quiet)
787
788
789 def _SetRevisionCmdArgParser(subparser):
790   subparser.description = 'Specify the revision of a package.'
791   subparser.add_argument(
792     '--revision-package', metavar='NAME', dest='setrevision__package',
793     required=True,
794     help='Package name to set revision of.')
795   subparser.add_argument(
796     '--revision', metavar='NUM', dest='setrevision__revision',
797     type=int, required=True,
798     help='SVN Revision of the package to set.')
799
800
def _DoSetRevisionCmd(arguments):
  """Pins a package to a revision by writing a local revision file.

  Downloads the package description for the requested revision for every
  package target containing the package, records each target's package
  info into a RevisionInfo object, and saves that object as the package's
  revision file under the revisions directory. Temp files left in the
  revisions directory are cleaned up afterwards.
  """
  package_name = arguments.setrevision__package
  revision_num = arguments.setrevision__revision

  revision_desc = revision_info.RevisionInfo(arguments.packages_desc)
  revision_desc.SetRevisionNumber(revision_num)

  custom_package_targets = GetPackageTargetPackages(package_name, [])
  if not custom_package_targets:
    # Standard package: look up its package targets in the packages file.
    package_targets = arguments.packages_desc.GetPackageTargetsForPackage(
        package_name)
  else:
    # Custom "$PACKAGE_TARGET/$PACKAGE" name: use the parsed target list
    # and normalize package_name down to the bare package portion.
    package_targets = [target[0] for target in custom_package_targets]
    first_target = custom_package_targets[0]
    package_name = first_target[1]

  # Download each target's package description into a scratch directory
  # that is deleted once all targets have been recorded.
  with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
    for package_target in package_targets:
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
          revision_num,
          package_target,
          package_name)

      temp_package_file = os.path.join(
          work_dir,
          os.path.basename(remote_package_key) + TEMP_SUFFIX)

      package_info.DownloadPackageInfoFiles(
          temp_package_file,
          remote_package_key,
          downloader=arguments.gsd_store.GetFile)

      package_desc = package_info.PackageInfo(temp_package_file)

      logging.info('Setting %s:%s to revision %s',
                   package_target, package_name, revision_num)
      revision_desc.SetTargetRevision(
          package_name,
          package_target,
          package_desc)

  revision_file = package_locations.GetRevisionFile(
      arguments.revisions_dir,
      package_name)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(revision_file)
  revision_desc.SaveRevisionFile(revision_file)

  CleanTempFiles(arguments.revisions_dir)
850
851
852 def _GetRevisionCmdArgParser(subparser):
853   subparser.description = 'Get the revision of a package.'
854   subparser.add_argument(
855     '--revision-package', metavar='NAME', dest='getrevision__package',
856     required=True,
857     help='Package name to get revision of.')
858
859
def _DoGetRevisionCmd(arguments):
  """Prints the revision number a package is currently pinned to.

  Reads the package's revision file from the revisions directory and
  prints its stored revision number.

  Raises:
    error.Error: If no revision file exists for the package.
  """
  package_name = arguments.getrevision__package

  # Custom "$PACKAGE_TARGET/$PACKAGE" name: strip down to the bare package
  # name, which is what the revision file is keyed on.
  custom_package_targets = GetPackageTargetPackages(package_name, [])
  if custom_package_targets:
    custom_target, package_name = custom_package_targets[0]

  revision_file = package_locations.GetRevisionFile(arguments.revisions_dir,
                                                    package_name)

  if not os.path.isfile(revision_file):
    raise error.Error('No revision set for package: %s.' % package_name)

  revision_desc = revision_info.RevisionInfo(arguments.packages_desc,
                                             revision_file)
  print revision_desc.GetRevisionNumber()
876
877
878 def _FillEmptyTarsParser(subparser):
879   subparser.description = 'Fill missing archives with empty ones in a package.'
880   subparser.add_argument(
881     '--fill-package', metavar='NAME', dest='fillemptytars_package',
882     required=True,
883     help='Package name to fill empty archives of.')
884
885
def _DoFillEmptyTarsCmd(arguments):
  """Backs every hash-less archive entry of a package with an empty tar.

  For each (package target, package) pair, loads the local package file
  (tolerating missing archives), then rewrites it so that every archive
  entry without a hash points at a freshly created empty tar file of the
  matching compression type.

  Raises:
    error.Error: If the package name is unknown or an archive has an
      unrecognized file extension.
  """
  package_target_packages = GetPackageTargetPackages(
      arguments.fillemptytars_package,
      arguments.package_target_packages
  )
  if not package_target_packages:
    raise error.Error('Unknown package: %s.' % arguments.fillemptytars_package
                + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in package_target_packages:
    package_path = package_locations.GetLocalPackageFile(arguments.tar_dir,
                                                         package_target,
                                                         package_name)

    package_desc = package_info.PackageInfo(package_path, skip_missing=True)
    output_package_desc = package_info.PackageInfo()
    for archive in package_desc.GetArchiveList():
      # If archive does not exist, fill it with an empty one.
      archive_data = archive.GetArchiveData()
      if archive_data.hash:
        output_package_desc.AppendArchive(archive)
      else:
        logging.info('Filling missing archive: %s.', archive_data.name)
        # Pick the tarfile write mode from the archive's file extension.
        if (archive_data.name.endswith('.tar.gz') or
            archive_data.name.endswith('.tgz')):
          mode = 'w:gz'
        elif archive_data.name.endswith('.bz2'):
          mode = 'w:bz2'
        elif archive_data.name.endswith('.tar'):
          mode = 'w:'
        else:
          raise error.Error('Unknown archive type: %s.' % archive_data.name)

        archive_file = package_locations.GetLocalPackageArchiveFile(
            arguments.tar_dir,
            package_target,
            package_name,
            archive_data.name
            )

        # Opening and immediately closing the tar writes a valid empty
        # archive to disk; then hash it for the new package entry.
        tar_file = cygtar.CygTar(archive_file, mode)
        tar_file.Close()
        tar_hash = archive_info.GetArchiveHash(archive_file)

        empty_archive = archive_info.ArchiveInfo(name=archive_data.name,
                                                 hash=tar_hash)
        output_package_desc.AppendArchive(empty_archive)

    output_package_desc.SavePackageFile(package_path)
935
936
937 def _RecalcRevisions(subparser):
938   subparser.description = 'Recalculates hashes for files in revision directory.'
939
940
def _DoRecalcRevisions(arguments):
  """Re-saves every revision JSON file, recomputing its stored hashes."""
  json_names = [name for name in os.listdir(arguments.revisions_dir)
                if name.endswith('.json')]
  for name in json_names:
    path = os.path.join(arguments.revisions_dir, name)
    # Load with hash verification disabled so stale hashes do not abort,
    # then save to rewrite the file with freshly computed hashes.
    desc = revision_info.RevisionInfo(arguments.packages_desc)
    desc.LoadRevisionFile(path, skip_hash_verify=True)
    desc.SaveRevisionFile(path)
948
949
# Pairs a command's argparse setup function with its implementation function.
CommandFuncs = collections.namedtuple('CommandFuncs',
                                      ['parse_func', 'do_cmd_func'])
953
954
# Maps each sub-command name to its CommandFuncs pair: ParseArgs uses
# parse_func to build the subparser, main uses do_cmd_func to dispatch.
COMMANDS = {
    'list': CommandFuncs(_ListCmdArgParser, _DoListCmd),
    'archive': CommandFuncs(_ArchiveCmdArgParser, _DoArchiveCmd),
    'extract': CommandFuncs(_ExtractCmdArgParser, _DoExtractCmd),
    'upload': CommandFuncs(_UploadCmdArgParser, _DoUploadCmd),
    'sync': CommandFuncs(_SyncCmdArgParser, _DoSyncCmd),
    'setrevision': CommandFuncs(_SetRevisionCmdArgParser, _DoSetRevisionCmd),
    'getrevision': CommandFuncs(_GetRevisionCmdArgParser, _DoGetRevisionCmd),
    'fillemptytars': CommandFuncs(_FillEmptyTarsParser, _DoFillEmptyTarsCmd),
    'recalcrevisions': CommandFuncs(_RecalcRevisions, _DoRecalcRevisions),
}
966
967
def ParseArgs(args):
  """Parses command-line arguments and precomputes shared command state.

  Builds the global option parser plus one subparser per entry in COMMANDS,
  then augments the parsed namespace with: a defaulted tar_dir, the parsed
  packages description (packages_desc), the resolved list of
  (package_target, package) tuples (package_target_packages), and a cloud
  storage accessor (gsd_store).

  Args:
    args: List of command-line argument strings (e.g. sys.argv[1:]).
  Returns:
    The argparse namespace with the extra attributes described above.
  Raises:
    error.Error: If a package target has no packages defined, or a custom
      package name is not of the form $PACKAGE_TARGET/$PACKAGE.
  """
  parser = argparse.ArgumentParser()

  host_platform = pynacl.platform.GetOS()
  host_arch = pynacl.platform.GetArch3264()

  # List out global options for all commands.
  parser.add_argument(
    '-v', '--verbose', dest='verbose',
    action='store_true', default=False,
    help='Verbose output')
  parser.add_argument(
    '-q', '--quiet', dest='quiet',
    action='store_true', default=False,
    help='Quiet output')
  parser.add_argument(
    '--platform', dest='host_platform',
    default=host_platform,
    help='Custom platform other than the current (%s).' % host_platform)
  parser.add_argument(
    '--arch', dest='host_arch',
    default=host_arch,
    help='Custom architecture other than the current (%s).' % host_arch)
  parser.add_argument(
    '--package-targets', dest='package_targets',
    default=None,
    # Fixed typo in help text: "specifed" -> "specified".
    help='Custom package targets specified as comma separated names. Defaults'
         ' to package targets defined for host platform and architecture inside'
         ' of the packages json file.')
  parser.add_argument(
    '--packages', dest='packages',
    default=None,
    help='Custom packages specified as comma separated package names. Custom'
         ' packages not defined by the packages json file must be prefixed by'
         ' the package_target directory (IE. $PACKAGE_TARGET/$PACKAGE).')
  parser.add_argument(
    '--append', metavar='PACKAGE', dest='append_packages',
    action='append', default=[],
    help='Append extra package to current list of packages.')
  parser.add_argument(
    '--exclude', metavar='PACKAGE', dest='exclude_packages',
    action='append', default=[],
    help='Exclude package from current list of packages.')
  parser.add_argument(
    '--packages-json', dest='packages_json',
    default=DEFAULT_PACKAGES_JSON, type=argparse.FileType('rt'),
    help='Packages description file.'
         ' [Default: %s]' % DEFAULT_PACKAGES_JSON)
  parser.add_argument(
    '--revisions-dir', dest='revisions_dir',
    default=DEFAULT_REVISIONS_DIR,
    help='Revisions directory where packages revisions will be found.')
  parser.add_argument(
    '--dest-dir', dest='dest_dir',
    default=DEFAULT_DEST_DIR,
    help='Destination directory where all the packages will be extracted to.')
  parser.add_argument(
    '--tar-dir', dest='tar_dir',
    default=None,
    help='Directory for package archive files. Defaults to "$DEST-DIR/.tars".')
  parser.add_argument(
    '--annotate', dest='annotate',
    action='store_true', default=False,
    help='Print out build bot annotations.')
  parser.add_argument(
    '--cloud-bucket', dest='cloud_bucket',
    default=DEFAULT_CLOUD_BUCKET,
    help='Google storage cloud bucket name.'
         ' [Default: %s]' % DEFAULT_CLOUD_BUCKET)

  # Add subparsers for all commands. These are flags for specific commands,
  # IE. [options] command [command-options]
  command_parser = parser.add_subparsers(title='command', dest='command')
  for command, cmd_funcs in COMMANDS.iteritems():
    sub_parser = command_parser.add_parser(command)
    cmd_funcs.parse_func(sub_parser)

  arguments = parser.parse_args(args)
  pynacl.log_tools.SetupLogging(
      verbose=arguments.verbose, quiet=arguments.quiet)
  if arguments.tar_dir is None:
    arguments.tar_dir = os.path.join(arguments.dest_dir, '.tars')

  # Parse the package description up front and store it into the arguments
  # object. Almost all the commands need to use this information.
  packages_desc = packages_info.PackagesInfo(arguments.packages_json)
  arguments.packages_desc = packages_desc

  # Based on the host platform and host architecture, we can determine the set
  # of package targets used from the packages description. Minimize platform
  # and architecture errors by standardizing the names using pynacl.platform.
  if arguments.package_targets is None:
    package_targets = packages_desc.GetPackageTargets(
        pynacl.platform.GetOS(arguments.host_platform),
        pynacl.platform.GetArch3264(arguments.host_arch))
  else:
    package_targets = arguments.package_targets.split(',')

  # If the packages argument was not set, use the default list of packages
  # for each package target.
  packages_set = set()
  if arguments.packages is None:
    for package_target in package_targets:
      packages = packages_desc.GetPackages(package_target)
      if packages is None:
        raise error.Error('No packages defined for Package Target: %s.' %
                          package_target)
      packages_set.update(packages)
  else:
    packages_set.update(arguments.packages.split(','))

  # Append/exclude any extra packages that were specified.
  packages_set.update(arguments.append_packages)
  packages_set.difference_update(arguments.exclude_packages)

  # Build a dictionary that organizes packages to their respective package
  # targets. Packages may exist in multiple package targets so we will have
  # to have the key be package and value be a list of package targets.
  package_targets_dict = collections.defaultdict(list)
  for package_target in package_targets:
    for package in packages_desc.GetPackages(package_target):
      package_targets_dict[package].append(package_target)

  # Use the list of packages to determine the set of package target packages
  # we are operating on, custom package targets will have the package target
  # inside of the name of the package name (see help for "--packages" argument).
  # The package_target_packages is a list of tuples (package_target, package),
  # for every package along with the associated package target.
  package_target_packages = []
  for package in sorted(packages_set):
    package_targets = package_targets_dict.get(package, None)
    if package_targets is None:
      custom_package_targets = GetPackageTargetPackages(package, [])
      if not custom_package_targets:
        raise error.Error('Invalid custom package: "%s".'
                          ' Expected $PACKAGE_TARGET/$PACKAGE' % package)
      package_target_packages.extend(custom_package_targets)
    else:
      for package_target in package_targets:
        package_target_packages.append((package_target, package))

  arguments.package_target_packages = package_target_packages

  # Create a GSD Storage object for those who need it.
  cloud_bucket = arguments.cloud_bucket
  gsd_store = pynacl.gsd_storage.GSDStorage(cloud_bucket, [cloud_bucket])
  arguments.gsd_store = gsd_store

  return arguments
1117
1118
def main(args):
  """Parses command-line args and dispatches to the selected sub-command.

  Returns the command's exit status, or 1 on a package_version error.
  """
  try:
    parsed = ParseArgs(args)
    handler = COMMANDS[parsed.command].do_cmd_func
    return handler(parsed)
  except error.Error as err:
    sys.stderr.write('package_version: ' + str(err) + '\n')
    return 1
1126
1127
# Script entry point: the process exit status is the command's return code.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))