2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
12 from telemetry.core import command_line
13 from telemetry.util import cloud_storage
    # Map of human-friendly alias (accepted on the command line) to the real
    # Cloud Storage bucket constant from telemetry.util.cloud_storage.
    # NOTE(review): the `BUCKET_ALIASES = {` opener and the closing `}` appear
    # to be elided from this chunk — confirm against the full file.
    'public': cloud_storage.PUBLIC_BUCKET,
    'partner': cloud_storage.PARTNER_BUCKET,
    'google-only': cloud_storage.INTERNAL_BUCKET,
# Reverse of BUCKET_ALIASES: maps each real bucket name back to the short
# alias it is displayed as (e.g. by the Ls command).
BUCKETS = dict(
    (bucket_name, alias) for alias, bucket_name in BUCKET_ALIASES.iteritems())
  # Body fragment of a path helper; its `def` line (presumably
  # `def _GetPaths(path):`) appears elided from this chunk — confirm.
  # Split the extension to decide whether `path` is the data file or its
  # '.sha1' companion.
  root, ext = os.path.splitext(path)
  # NOTE(review): the lines assigning `file_path` (likely from `root` or
  # `path` depending on whether `ext` is '.sha1') appear elided here.
  # The hash file always sits next to the data file with a '.sha1' suffix.
  hash_path = path + '.sha1'
  return file_path, hash_path
def _FindFilesInCloudStorage(files):
  """Returns a dict of all files and which buckets they're in.

  NOTE(review): several interior lines appear elided from this chunk (the
  `bucket_contents = {}` / `file_buckets = {}` initializers, the `try:`
  matching the `except` below, the `for path in files:` loop header, the
  `continue` statements, and the final `return`) — confirm against the
  full file.
  """
  # Preprocessing: get the contents of all buckets.
  for bucket in BUCKETS:
      # Listing a bucket can fail for callers without access; such buckets
      # are skipped via the except clause below.
      bucket_contents[bucket] = cloud_storage.List(bucket)
    except (cloud_storage.PermissionError, cloud_storage.CredentialsError):
  # Check if each file is in the bucket contents.
    # Resolve the data-file path and its companion '.sha1' hash path.
    file_path, hash_path = _GetPaths(path)
    if file_path in file_buckets:
      # Ignore duplicates, if both data and sha1 file were in the file list.
    if not os.path.exists(hash_path):
      # Probably got some non-Cloud Storage files in the file list. Ignore.
    file_hash = cloud_storage.ReadHash(hash_path)
    file_buckets[file_path] = []
    # Record every readable bucket whose listing contains this file's hash.
    for bucket in BUCKETS:
      if bucket in bucket_contents and file_hash in bucket_contents[bucket]:
        file_buckets[file_path].append(bucket)
class Ls(command_line.Command):
  """List which bucket each file is in."""
  # NOTE(review): the @classmethod decorators and the `def Run(self, args):`
  # header appear elided from this chunk — confirm against the full file.
  def AddCommandLineArgs(cls, parser):
    # -r/--recursive: descend into subdirectories when expanding paths.
    parser.add_argument('-r', '--recursive', action='store_true')
    parser.add_argument('paths', nargs='+')
  def ProcessCommandLineArgs(cls, parser, args):
    # Validate all paths up front so argparse reports the failure.
    for path in args.paths:
      if not os.path.exists(path):
        parser.error('Path not found: %s' % path)
    # Local generator used by the (elided) Run() method below.
    def GetFilesInPaths(paths, recursive):
      """If path is a dir, yields all files in path, otherwise just yields path.
      If recursive is true, walks subdirectories recursively."""
      # NOTE(review): the `for path in paths:` loop header, the `yield path`
      # branch body, and the `if recursive:`/`else:` lines appear elided.
      if not os.path.isdir(path):
        # Recursive case: walk the entire tree under `path`.
        for root, _, filenames in os.walk(path):
          for filename in filenames:
            yield os.path.join(root, filename)
        # Non-recursive case: only direct children of `path`.
        for filename in os.listdir(path):
          yield os.path.join(path, filename)
    # Map each discovered file to the list of buckets holding its hash.
    files = _FindFilesInCloudStorage(GetFilesInPaths(args.paths, args.recursive))
      print 'No files in Cloud Storage.'
    # One output line per file: comma-joined bucket aliases, then the path.
    for file_path, buckets in sorted(files.iteritems()):
        # Translate real bucket names back to short aliases for display.
        buckets = [BUCKETS[bucket] for bucket in buckets]
        print '%-11s %s' % (','.join(buckets), file_path)
        print '%-11s %s' % ('not found', file_path)
class Mv(command_line.Command):
  """Move files to the given bucket."""
  # NOTE(review): the @classmethod decorators and the `def Run(self, args):`
  # header appear elided from this chunk — confirm against the full file.
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    # Only the short aliases defined in BUCKET_ALIASES are accepted.
    parser.add_argument('bucket', choices=BUCKET_ALIASES)
  def ProcessCommandLineArgs(cls, parser, args):
    # Resolve the alias to the real bucket name before Run() sees it.
    args.bucket = BUCKET_ALIASES[args.bucket]
    files = _FindFilesInCloudStorage(args.files)
    # First pass: fail fast if any requested file is in no bucket at all.
    # NOTE(review): the `if not buckets:` guard for this raise appears elided.
    for file_path, buckets in sorted(files.iteritems()):
      raise IOError('%s not found in Cloud Storage.' % file_path)
    # Second pass: move each file into the target bucket.
    for file_path, buckets in sorted(files.iteritems()):
      if args.bucket in buckets:
        # Already in the target bucket; drop it from the pending list.
        buckets.remove(args.bucket)
          logging.info('Skipping %s, no action needed.' % file_path)
      # Move to the target bucket.
      file_hash = cloud_storage.ReadHash(file_path + '.sha1')
      cloud_storage.Move(buckets.pop(), args.bucket, file_hash)
      # Delete all additional copies.
      for bucket in buckets:
        cloud_storage.Delete(bucket, file_hash)
class Rm(command_line.Command):
  """Remove files from Cloud Storage."""
  # NOTE(review): the @classmethod decorator(s) and the `def Run(self, args):`
  # header appear elided from this chunk — confirm against the full file.
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    # Delete every stored copy of each file, in every bucket that has it.
    files = _FindFilesInCloudStorage(args.files)
    for file_path, buckets in sorted(files.iteritems()):
      # The stored object is keyed by the content hash in the '.sha1' file.
      file_hash = cloud_storage.ReadHash(file_path + '.sha1')
      for bucket in buckets:
        cloud_storage.Delete(bucket, file_hash)
class Upload(command_line.Command):
  """Upload files to Cloud Storage."""
  # NOTE(review): the @classmethod decorators and the `def Run(self, args):`
  # header appear elided from this chunk — confirm against the full file.
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    # Only the short aliases defined in BUCKET_ALIASES are accepted.
    parser.add_argument('bucket', choices=BUCKET_ALIASES)
  def ProcessCommandLineArgs(cls, parser, args):
    # Resolve the alias to the real bucket name before Run() sees it.
    args.bucket = BUCKET_ALIASES[args.bucket]
    # Validate all inputs up front so argparse reports the failure.
    for path in args.files:
      if not os.path.exists(path):
        parser.error('File not found: %s' % path)
    for file_path in args.files:
      # The content hash doubles as the object name in Cloud Storage.
      file_hash = cloud_storage.CalculateHash(file_path)
      # Create or update the hash file.
      hash_path = file_path + '.sha1'
      with open(hash_path, 'wb') as f:
        # NOTE(review): the body writing `file_hash` into `f` appears elided.
      # Add the data to Cloud Storage.
      cloud_storage.Insert(args.bucket, file_hash, file_path)
      # Add the hash file to the branch, for convenience. :)
      subprocess.call(['git', 'add', hash_path])
# Top-level dispatcher wiring the subcommands into the command-line framework.
# (No docstring added on purpose: frameworks like this commonly surface class
# docstrings as help text, and the original class has none.)
class CloudStorageCommand(command_line.SubcommandCommand):
  # Tuple of subcommand classes exposed via command_line.SubcommandCommand.
  commands = (Ls, Mv, Rm, Upload)
if __name__ == '__main__':
  # Script entry point: surface INFO-level log output, then dispatch to the
  # subcommand runner and propagate its exit status to the shell.
  root_logger = logging.getLogger()
  root_logger.setLevel(logging.INFO)
  sys.exit(CloudStorageCommand.main())