# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import logging
import os
import subprocess

from telemetry.core import command_line
from telemetry.page import cloud_storage
# Maps the user-facing bucket names accepted on the command line to the
# Cloud Storage bucket identifiers defined by telemetry.
BUCKET_CHOICES = {
    'public': cloud_storage.PUBLIC_BUCKET,
    #'partner': cloud_storage.PARTNER_BUCKET,
    'google-only': cloud_storage.INTERNAL_BUCKET,
}
23 root, ext = os.path.splitext(path)
29 hash_path = path + '.sha1'
30 return file_path, hash_path
def _FindFilesInCloudStorage(files):
  """Determine which Cloud Storage buckets contain each of the given files.

  Args:
    files: Iterable of local paths (data files or their '.sha1' hash files).

  Returns:
    A dict mapping file_path -> list of BUCKET_CHOICES keys whose bucket
    contains that file's hash. An empty list means the file was found in
    no bucket (or has no local hash file).
  """
  # List each bucket's contents once up front. Buckets we lack credentials
  # or permission for are skipped (best-effort) rather than failing the
  # whole command.
  bucket_contents = {}
  for easy_bucket_name, bucket in BUCKET_CHOICES.iteritems():
    try:
      bucket_contents[easy_bucket_name] = cloud_storage.List(bucket)
    except (cloud_storage.PermissionError, cloud_storage.CredentialsError):
      pass

  file_buckets = {}
  for path in files:
    file_path, hash_path = _GetPaths(path)

    if file_path in file_buckets:
      # Already resolved (e.g. both 'foo' and 'foo.sha1' were passed).
      continue
    if not os.path.exists(hash_path):
      # No local hash file, so there is nothing to look up; record the file
      # as found in no bucket so it still appears in the results.
      file_buckets[file_path] = []
      continue

    # The hash file holds the hex SHA-1 of the data file; 1024 bytes is far
    # more than enough to read it, and rstrip drops the trailing newline.
    with open(hash_path, 'rb') as f:
      file_hash = f.read(1024).rstrip()

    buckets = []
    for bucket in BUCKET_CHOICES:
      if bucket not in bucket_contents:
        # We could not list this bucket above; skip it.
        continue
      if file_hash in bucket_contents[bucket]:
        buckets.append(bucket)

    file_buckets[file_path] = buckets

  return file_buckets
65 class Ls(command_line.ArgparseCommand):
66 """List which bucket each file is in."""
68 def AddCommandLineOptions(self, parser):
69 parser.add_argument('-r', '--recursive', action='store_true')
70 parser.add_argument('paths', nargs='+')
72 def ProcessCommandLine(self, parser, args):
73 for path in args.paths:
74 if not os.path.exists(path):
75 parser.error('File not found: %s' % path)
78 def GetFilesInPath(paths, recursive):
79 """If path is a dir, yields all files in path, otherwise just yields path.
81 If recursive is true, walks subdirectories recursively."""
83 if not os.path.isdir(path):
88 for root, _, filenames in os.walk(path):
89 for filename in filenames:
90 yield os.path.join(root, filename)
92 for filename in os.listdir(path):
93 yield os.path.join(path, filename)
95 files = _FindFilesInCloudStorage(GetFilesInPath(args.paths, args.recursive))
98 print 'No files in Cloud Storage.'
101 for file_path, buckets in sorted(files.iteritems()):
103 print '%-11s %s' % (','.join(buckets), file_path)
105 print '%-11s %s' % ('not found', file_path)
class Mv(command_line.ArgparseCommand):
  """Move files to the given bucket."""

  def AddCommandLineOptions(self, parser):
    parser.add_argument('files', nargs='+')
    parser.add_argument('bucket', choices=BUCKET_CHOICES)

  def ProcessCommandLine(self, parser, args):
    # Map the user-friendly bucket name to the real bucket identifier.
    args.bucket = BUCKET_CHOICES[args.bucket]

    # BUG FIX: this command's positional argument is 'files', not 'paths';
    # iterating args.paths raised AttributeError.
    for path in args.files:
      _, hash_path = _GetPaths(path)
      if not os.path.exists(hash_path):
        parser.error('File not found: %s' % hash_path)

  def Run(self, args):
    files = _FindFilesInCloudStorage(args.files)

    # Fail before touching anything if any file is missing from every bucket.
    for file_path, buckets in sorted(files.iteritems()):
      if not buckets:
        raise IOError('%s not found in Cloud Storage.' % file_path)

    for file_path, buckets in sorted(files.iteritems()):
      hash_path = file_path + '.sha1'
      with open(hash_path, 'rb') as f:
        file_hash = f.read(1024).rstrip()

      # NOTE(review): move one existing copy into the destination bucket only
      # if it isn't already there -- confirm this guard against the original.
      if args.bucket not in buckets:
        cloud_storage.Move(buckets.pop(), args.bucket, file_hash)

      # Delete any remaining copies in other buckets.
      for bucket in buckets:
        if bucket == args.bucket:
          continue
        cloud_storage.Delete(bucket, file_hash)
class Rm(command_line.ArgparseCommand):
  """Remove files from Cloud Storage."""

  def AddCommandLineOptions(self, parser):
    parser.add_argument('files', nargs='+')

  def ProcessCommandLine(self, parser, args):
    # BUG FIX: this command's positional argument is 'files', not 'paths';
    # iterating args.paths raised AttributeError.
    for path in args.files:
      _, hash_path = _GetPaths(path)
      if not os.path.exists(hash_path):
        parser.error('File not found: %s' % hash_path)

  def Run(self, args):
    files = _FindFilesInCloudStorage(args.files)
    for file_path, buckets in sorted(files.iteritems()):
      hash_path = file_path + '.sha1'
      with open(hash_path, 'rb') as f:
        file_hash = f.read(1024).rstrip()

      # Delete every copy of the file from every bucket that has it.
      for bucket in buckets:
        cloud_storage.Delete(bucket, file_hash)
class Upload(command_line.ArgparseCommand):
  """Upload files to Cloud Storage."""

  def AddCommandLineOptions(self, parser):
    parser.add_argument('files', nargs='+')
    parser.add_argument('bucket', choices=BUCKET_CHOICES)

  def ProcessCommandLine(self, parser, args):
    # Map the user-friendly bucket name to the real bucket identifier.
    args.bucket = BUCKET_CHOICES[args.bucket]

    # BUG FIX: this command's positional argument is 'files', not 'paths';
    # iterating args.paths raised AttributeError. Unlike Mv/Rm, we check the
    # data file itself here, since Upload creates the hash file.
    for path in args.files:
      if not os.path.exists(path):
        parser.error('File not found: %s' % path)

  def Run(self, args):
    for file_path in args.files:
      file_hash = cloud_storage.GetHash(file_path)

      # Create or update the hash file.
      hash_path = file_path + '.sha1'
      with open(hash_path, 'wb') as f:
        # BUG FIX: the hash was never written, leaving an empty hash file.
        f.write(file_hash)

      # Add the data to Cloud Storage.
      cloud_storage.Insert(args.bucket, file_hash, file_path)

      # Add the hash file to the branch, for convenience. :)
      subprocess.call(['git', 'add', hash_path])
COMMANDS = (Ls, Mv, Rm, Upload)


def main():
  """Parse the command line and dispatch to the selected sub-command."""
  logging.getLogger().setLevel(logging.INFO)

  parser = argparse.ArgumentParser()
  subparsers = parser.add_subparsers()

  for command in COMMANDS:
    # NOTE(review): COMMANDS holds the command classes themselves; this
    # assumes command_line.ArgparseCommand exposes 'name'/'description' on
    # the class and makes these methods callable on the class -- confirm
    # against telemetry.core.command_line.
    subparser = subparsers.add_parser(command.name, help=command.description)
    subparser.set_defaults(command=command)
    command.AddCommandLineOptions(subparser)

  args = parser.parse_args()
  args.command.ProcessCommandLine(parser, args)
  args.command.Run(args)


if __name__ == '__main__':
  main()