1 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Library to make common google storage operations more reliable."""
import contextlib
import datetime
import getpass
import hashlib
import logging
import os
import re
import tempfile
import urlparse
import uuid

from chromite.buildbot import constants
from chromite.lib import cache
from chromite.lib import cros_build_lib
from chromite.lib import osutils
from chromite.lib import retry_util
from chromite.lib import timeout_util
PUBLIC_BASE_HTTPS_URL = 'https://commondatastorage.googleapis.com/'
PRIVATE_BASE_HTTPS_URL = 'https://storage.cloud.google.com/'
# Prefix for all Google Storage URLs; used by CanonicalizeURL/Cat/Copy/LS to
# decide whether a path is remote.
BASE_GS_URL = 'gs://'

# Format used by "gsutil ls -l" when reporting modified time.
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'

# Regexp for parsing each line of output from "gsutil ls -l".
# This regexp is prepared for the generation and meta_generation values,
# too, even though they are not expected until we use "-a".
LS_LA_RE = re.compile(
    r'^\s*(\d*?)\s+(\S*?)\s+([^#$]+).*?(#(\d+)\s+meta_?generation=(\d+))?\s*$')
def CanonicalizeURL(url, strict=False):
  """Convert provided URL to gs:// URL, if it follows a known format.

  Args:
    url: URL to canonicalize.
    strict: Raises exception if URL cannot be canonicalized.

  Returns:
    The gs:// form of |url| when it matches a known https prefix, else the
    url unchanged.
  """
  for prefix in (PUBLIC_BASE_HTTPS_URL, PRIVATE_BASE_HTTPS_URL):
    if url.startswith(prefix):
      # Only rewrite the leading prefix; the rest of the URL is the path.
      return url.replace(prefix, BASE_GS_URL, 1)

  if not url.startswith(BASE_GS_URL) and strict:
    raise ValueError('Url %r cannot be canonicalized.' % url)
  # Already canonical (or unknown and strict is False): hand it back as-is.
  return url
def GetGsURL(bucket, for_gsutil=False, public=True, suburl=''):
  """Construct a Google Storage URL

  Args:
    bucket: The Google Storage bucket to use
    for_gsutil: Do you want a URL for passing to `gsutil`?
    public: Do we want the public or private url
    suburl: A url fragment to tack onto the end

  Returns:
    The fully constructed URL
  """
  if for_gsutil:
    # gsutil takes gs:// URLs, not https browser URLs.
    urlbase = BASE_GS_URL
  else:
    urlbase = PUBLIC_BASE_HTTPS_URL if public else PRIVATE_BASE_HTTPS_URL
  return '%s%s/%s' % (urlbase, bucket, suburl)
class GSContextException(Exception):
  """Base exception for all exceptions thrown by GSContext.

  Catch this to handle any failure from a GSContext operation generically.
  """


# Since the underlying code uses RunCommand, some callers might be trying to
# catch cros_build_lib.RunCommandError themselves. Extend that class so that
# code continues to work.
class GSCommandError(GSContextException, cros_build_lib.RunCommandError):
  """Thrown when an error happened we couldn't decode."""


class GSContextPreconditionFailed(GSContextException):
  """Thrown when google storage returns code=PreconditionFailed."""


class GSNoSuchKey(GSContextException):
  """Thrown when google storage returns code=NoSuchKey."""
class GSCounter(object):
  """A counter class for Google Storage."""

  def __init__(self, ctx, path):
    """Create a counter object.

    Args:
      ctx: A GSContext object.
      path: The path to the counter in Google Storage.
    """
    self.ctx = ctx
    self.path = path

  def Get(self):
    """Get the current value of a counter."""
    try:
      return int(self.ctx.Cat(self.path).output)
    except GSNoSuchKey:
      # A counter that has never been written to reads as zero.
      return 0

  def AtomicCounterOperation(self, default_value, operation):
    """Atomically set the counter value using |operation|.

    Args:
      default_value: Default value to use for counter, if counter
        does not exist.
      operation: Function that takes the current counter value as a
        parameter, and returns the new desired value.

    Returns:
      The new counter value. None if value could not be set.
    """
    generation, _ = self.ctx.GetGeneration(self.path)
    # range instead of xrange: behaviorally identical here and works on
    # both Python 2 and 3; retries is small so materializing a list is fine.
    for _ in range(self.ctx.retries + 1):
      try:
        value = default_value if generation == 0 else operation(self.Get())
        self.ctx.Copy('-', self.path, input=str(value), version=generation)
        return value
      except (GSContextPreconditionFailed, GSNoSuchKey):
        # GSContextPreconditionFailed is thrown if another builder is also
        # trying to update the counter and we lost the race. GSNoSuchKey is
        # thrown if another builder deleted the counter. In either case, fetch
        # the generation again, and, if it has changed, try the copy again.
        new_generation, _ = self.ctx.GetGeneration(self.path)
        if new_generation == generation:
          # Generation did not change; the failure is not a lost race, so
          # retrying would not help.
          raise
        generation = new_generation

  def Increment(self):
    """Increment the counter.

    Returns:
      The new counter value. None if value could not be set.
    """
    return self.AtomicCounterOperation(1, lambda x: x + 1)

  def Decrement(self):
    """Decrement the counter.

    Returns:
      The new counter value. None if value could not be set.
    """
    return self.AtomicCounterOperation(-1, lambda x: x - 1)

  def Reset(self):
    """Reset the counter to zero.

    Returns:
      The new counter value. None if value could not be set.
    """
    return self.AtomicCounterOperation(0, lambda x: 0)

  def StreakIncrement(self):
    """Increment the counter if it is positive, otherwise set it to 1.

    Returns:
      The new counter value. None if value could not be set.
    """
    return self.AtomicCounterOperation(1, lambda x: x + 1 if x > 0 else 1)

  def StreakDecrement(self):
    """Decrement the counter if it is negative, otherwise set it to -1.

    Returns:
      The new counter value. None if value could not be set.
    """
    return self.AtomicCounterOperation(-1, lambda x: x - 1 if x < 0 else -1)
class GSContext(object):
  """A class to wrap common google storage operations."""

  # Error messages that indicate an invalid BOTO config.
  AUTHORIZATION_ERRORS = ('no configured', 'detail=Authorization')

  DEFAULT_BOTO_FILE = os.path.expanduser('~/.boto')
  DEFAULT_GSUTIL_TRACKER_DIR = os.path.expanduser('~/.gsutil')
  # This is set for ease of testing.
  DEFAULT_GSUTIL_BIN = None
  DEFAULT_GSUTIL_BUILDER_BIN = '/b/build/third_party/gsutil/gsutil'
  # How many times to retry uploads.
  DEFAULT_RETRIES = 3

  # Multiplier for how long to sleep (in seconds) between retries; will delay
  # (1*sleep) the first time, then (2*sleep), continuing via attempt * sleep.
  DEFAULT_SLEEP_TIME = 60

  GSUTIL_TAR = 'gsutil_3.42.tar.gz'
  GSUTIL_URL = PUBLIC_BASE_HTTPS_URL + 'pub/%s' % GSUTIL_TAR

  # Flaky error fragments emitted by gsutil; _RetryFilter uses these to decide
  # whether to clear tracker files and retry.
  RESUMABLE_UPLOAD_ERROR = ('Too many resumable upload attempts failed without '
                            'progress')
  RESUMABLE_DOWNLOAD_ERROR = ('Too many resumable download attempts failed '
                              'without progress')
210 def GetDefaultGSUtilBin(cls, cache_dir=None):
211 if cls.DEFAULT_GSUTIL_BIN is None:
212 if cache_dir is None:
213 # Import here to avoid circular imports (commandline imports gs).
214 from chromite.lib import commandline
215 cache_dir = commandline.GetCacheDir()
216 if cache_dir is not None:
217 common_path = os.path.join(cache_dir, constants.COMMON_CACHE)
218 tar_cache = cache.TarballCache(common_path)
219 key = (cls.GSUTIL_TAR,)
220 # The common cache will not be LRU, removing the need to hold a read
221 # lock on the cached gsutil.
222 ref = tar_cache.Lookup(key)
223 ref.SetDefault(cls.GSUTIL_URL)
224 cls.DEFAULT_GSUTIL_BIN = os.path.join(ref.path, 'gsutil', 'gsutil')
226 # Check if the default gsutil path for builders exists. If
227 # not, try locating gsutil. If none exists, simply use 'gsutil'.
228 gsutil_bin = cls.DEFAULT_GSUTIL_BUILDER_BIN
229 if not os.path.exists(gsutil_bin):
230 gsutil_bin = osutils.Which('gsutil')
231 if gsutil_bin is None:
232 gsutil_bin = 'gsutil'
233 cls.DEFAULT_GSUTIL_BIN = gsutil_bin
235 return cls.DEFAULT_GSUTIL_BIN
237 def __init__(self, boto_file=None, cache_dir=None, acl=None,
238 dry_run=False, gsutil_bin=None, init_boto=False, retries=None,
243 boto_file: Fully qualified path to user's .boto credential file.
244 cache_dir: The absolute path to the cache directory. Use the default
245 fallback if not given.
246 acl: If given, a canned ACL. It is not valid to pass in an ACL file
247 here, because most gsutil commands do not accept ACL files. If you
248 would like to use an ACL file, use the SetACL command instead.
249 dry_run: Testing mode that prints commands that would be run.
250 gsutil_bin: If given, the absolute path to the gsutil binary. Else
251 the default fallback will be used.
252 init_boto: If set to True, GSContext will check during __init__ if a
253 valid boto config is configured, and if not, will attempt to ask the
254 user to interactively set up the boto config.
255 retries: Number of times to retry a command before failing.
256 sleep: Amount of time to sleep between failures.
258 if gsutil_bin is None:
259 gsutil_bin = self.GetDefaultGSUtilBin(cache_dir)
261 self._CheckFile('gsutil not found', gsutil_bin)
262 self.gsutil_bin = gsutil_bin
264 # The version of gsutil is retrieved on demand and cached here.
265 self._gsutil_version = None
267 # TODO (yjhong): disable parallel composite upload for now because
268 # it is not backward compatible (older gsutil versions cannot
269 # download files uploaded with this option enabled). Remove this
270 # after all users transition to newer versions (3.37 and above).
271 self.gsutil_flags = ['-o', 'GSUtil:parallel_composite_upload_threshold=0']
273 # Set HTTP proxy if environment variable http_proxy is set
274 # (crbug.com/325032).
275 if 'http_proxy' in os.environ:
276 url = urlparse.urlparse(os.environ['http_proxy'])
277 if not url.hostname or (not url.username and url.password):
278 logging.warning('GS_ERROR: Ignoring env variable http_proxy because it '
279 'is not properly set: %s', os.environ['http_proxy'])
281 self.gsutil_flags += ['-o', 'Boto:proxy=%s' % url.hostname]
283 self.gsutil_flags += ['-o', 'Boto:proxy_user=%s' % url.username]
285 self.gsutil_flags += ['-o', 'Boto:proxy_pass=%s' % url.password]
287 self.gsutil_flags += ['-o', 'Boto:proxy_port=%d' % url.port]
289 # Increase the number of retries. With 10 retries, Boto will try a total of
290 # 11 times and wait up to 2**11 seconds (~30 minutes) in total, not
291 # not including the time spent actually uploading or downloading.
292 self.gsutil_flags += ['-o', 'Boto:num_retries=10']
294 # Prefer boto_file if specified, else prefer the env then the default.
295 if boto_file is None:
296 boto_file = os.environ.get('BOTO_CONFIG')
297 if boto_file is None:
298 boto_file = self.DEFAULT_BOTO_FILE
299 self.boto_file = boto_file
303 self.dry_run = dry_run
304 self.retries = self.DEFAULT_RETRIES if retries is None else int(retries)
305 self._sleep_time = self.DEFAULT_SLEEP_TIME if sleep is None else int(sleep)
311 def gsutil_version(self):
312 """Return the version of the gsutil in this context."""
313 if not self._gsutil_version:
314 cmd = ['-q', 'version']
316 # gsutil has been known to return version to stderr in the past, so
317 # use combine_stdout_stderr=True.
318 result = self.DoCommand(cmd, combine_stdout_stderr=True)
320 # Expect output like: gsutil version 3.35
321 match = re.search(r'^\s*gsutil\s+version\s+([\d.]+)', result.output,
324 self._gsutil_version = match.group(1)
326 raise GSContextException('Unexpected output format from "%s":\n%s.' %
327 (result.cmdstr, result.output))
329 return self._gsutil_version
  def _CheckFile(self, errmsg, afile):
    """Pre-flight check for valid inputs.

    Args:
      errmsg: Error message to display.
      afile: Fully qualified path to test file existence.

    Raises:
      GSContextException: If |afile| is not an existing regular file.
    """
    if not os.path.isfile(afile):
      raise GSContextException('%s, %s is not a file' % (errmsg, afile))
342 """Quick test of gsutil functionality."""
343 result = self.DoCommand(['ls'], retries=0, debug_level=logging.DEBUG,
344 redirect_stderr=True, error_code_ok=True)
345 return not (result.returncode == 1 and
346 any(e in result.error for e in self.AUTHORIZATION_ERRORS))
348 def _ConfigureBotoConfig(self):
349 """Make sure we can access protected bits in GS."""
350 print 'Configuring gsutil. **Please use your @google.com account.**'
352 self.DoCommand(['config'], retries=0, debug_level=logging.CRITICAL,
355 if (os.path.exists(self.boto_file) and not
356 os.path.getsize(self.boto_file)):
357 os.remove(self.boto_file)
358 raise GSContextException('GS config could not be set up.')
361 if not self._TestGSLs():
362 self._ConfigureBotoConfig()
  def Cat(self, path, **kwargs):
    """Returns the contents of a GS object.

    Args:
      path: Full gs:// url of the object, or a local file path.
      **kwargs: See options that DoCommand takes.
    """
    kwargs.setdefault('redirect_stdout', True)
    if not path.startswith(BASE_GS_URL):
      # gsutil doesn't support cat-ting a local path, so just run 'cat' in that
      # case.
      kwargs.pop('retries', None)
      kwargs.pop('headers', None)
      return cros_build_lib.RunCommand(['cat', path], **kwargs)
    return self.DoCommand(['cat', path], **kwargs)
375 def CopyInto(self, local_path, remote_dir, filename=None, **kwargs):
376 """Upload a local file into a directory in google storage.
379 local_path: Local file path to copy.
380 remote_dir: Full gs:// url of the directory to transfer the file into.
381 filename: If given, the filename to place the content at; if not given,
382 it's discerned from basename(local_path).
383 **kwargs: See Copy() for documentation.
385 filename = filename if filename is not None else local_path
386 # Basename it even if an explicit filename was given; we don't want
387 # people using filename as a multi-directory path fragment.
388 return self.Copy(local_path,
389 '%s/%s' % (remote_dir, os.path.basename(filename)),
393 def _GetTrackerFilenames(dest_path):
394 """Returns a list of gsutil tracker filenames.
396 Tracker files are used by gsutil to resume downloads/uploads. This
397 function does not handle parallel uploads.
400 dest_path: Either a GS path or an absolute local path.
403 The list of potential tracker filenames.
405 dest = urlparse.urlsplit(dest_path)
407 if dest.scheme == 'gs':
409 bucket_name = dest.netloc
410 object_name = dest.path.lstrip('/')
412 re.sub(r'[/\\]', '_', 'resumable_upload__%s__%s.url' %
413 (bucket_name, object_name)))
417 re.sub(r'[/\\]', '_', 'resumable_download__%s.etag' % dest.path))
419 hashed_filenames = []
420 for filename in filenames:
421 if not isinstance(filename, unicode):
422 filename = unicode(filename, 'utf8').encode('utf-8')
423 m = hashlib.sha1(filename)
424 hashed_filenames.append('%s_TRACKER_%s.%s' %
425 (prefix, m.hexdigest(), filename[-16:]))
427 return hashed_filenames
429 def _RetryFilter(self, e):
430 """Function to filter retry-able RunCommandError exceptions.
433 e: Exception object to filter. Exception may be re-raised as
434 as different type, if _RetryFilter determines a more appropriate
435 exception type based on the contents of e.
438 True for exceptions thrown by a RunCommand gsutil that should be retried.
440 if not retry_util.ShouldRetryCommandCommon(e):
443 # e is guaranteed by above filter to be a RunCommandError
445 if e.result.returncode < 0:
446 logging.info('Child process received signal %d; not retrying.',
447 -e.result.returncode)
450 error = e.result.error
452 if 'GSResponseError' in error:
453 if 'code=PreconditionFailed' in error:
454 raise GSContextPreconditionFailed(e)
455 if 'code=NoSuchKey' in error:
458 # If the file does not exist, one of the following errors occurs.
459 if ('InvalidUriError:' in error or
460 'Attempt to get key for' in error or
461 'CommandException: No URIs matched' in error or
462 'CommandException: One or more URIs matched no objects' in error or
463 'CommandException: No such object' in error or
464 'Some files could not be removed' in error or
465 'does not exist' in error):
468 logging.warning('GS_ERROR: %s', error)
470 # TODO: Below is a list of known flaky errors that we should
471 # retry. The list needs to be extended.
473 # Temporary fix: remove the gsutil tracker files so that our retry
474 # can hit a different backend. This should be removed after the
475 # bug is fixed by the Google Storage team (see crbug.com/308300).
476 if (self.RESUMABLE_DOWNLOAD_ERROR in error or
477 self.RESUMABLE_UPLOAD_ERROR in error or
478 'ResumableUploadException' in error or
479 'ResumableDownloadException' in error):
481 # Only remove the tracker files if we try to upload/download a file.
482 if 'cp' in e.result.cmd[:-2]:
483 # Assume a command: gsutil [options] cp [options] src_path dest_path
484 # dest_path needs to be a fully qualified local path, which is already
485 # required for GSContext.Copy().
486 tracker_filenames = self._GetTrackerFilenames(e.result.cmd[-1])
487 logging.info('Potential list of tracker files: %s',
489 for tracker_filename in tracker_filenames:
490 tracker_file_path = os.path.join(self.DEFAULT_GSUTIL_TRACKER_DIR,
492 if os.path.exists(tracker_file_path):
493 logging.info('Deleting gsutil tracker file %s before retrying.',
495 logging.info('The content of the tracker file: %s',
496 osutils.ReadFile(tracker_file_path))
497 osutils.SafeUnlink(tracker_file_path)
500 # We have seen flaky errors with 5xx return codes.
501 if 'GSResponseError: status=5' in error:
506 # TODO(mtennant): Make a private method.
507 def DoCommand(self, gsutil_cmd, headers=(), retries=None, version=None,
508 parallel=False, **kwargs):
509 """Run a gsutil command, suppressing output, and setting retry/sleep.
512 gsutil_cmd: The (mostly) constructed gsutil subcommand to run.
513 headers: A list of raw headers to pass down.
514 parallel: Whether gsutil should enable parallel copy/update of multiple
515 files. NOTE: This option causes gsutil to use significantly more
516 memory, even if gsutil is only uploading one file.
517 retries: How many times to retry this command (defaults to setting given
519 version: If given, the generation; essentially the timestamp of the last
520 update. Note this is not the same as sequence-number; it's
521 monotonically increasing bucket wide rather than reset per file.
522 The usage of this is if we intend to replace/update only if the version
523 is what we expect. This is useful for distributed reasons- for example,
524 to ensure you don't overwrite someone else's creation, a version of
525 0 states "only update if no version exists".
528 A RunCommandResult object.
530 kwargs = kwargs.copy()
531 kwargs.setdefault('redirect_stderr', True)
533 cmd = [self.gsutil_bin]
534 cmd += self.gsutil_flags
535 for header in headers:
536 cmd += ['-h', header]
537 if version is not None:
538 cmd += ['-h', 'x-goog-if-generation-match:%d' % int(version)]
540 # Enable parallel copy/update of multiple files if stdin is not to
541 # be piped to the command. This does not split a single file into
542 # smaller components for upload.
543 if parallel and kwargs.get('input') is None:
546 cmd.extend(gsutil_cmd)
549 retries = self.retries
551 extra_env = kwargs.pop('extra_env', {})
552 extra_env.setdefault('BOTO_CONFIG', self.boto_file)
555 logging.debug("%s: would've run: %s", self.__class__.__name__,
556 cros_build_lib.CmdToStr(cmd))
559 return retry_util.GenericRetry(self._RetryFilter,
560 retries, cros_build_lib.RunCommand,
561 cmd, sleep=self._sleep_time,
562 extra_env=extra_env, **kwargs)
563 except cros_build_lib.RunCommandError as e:
564 raise GSCommandError(e.msg, e.result, e.exception)
566 def Copy(self, src_path, dest_path, acl=None, recursive=False,
567 skip_symlinks=True, **kwargs):
568 """Copy to/from GS bucket.
570 Canned ACL permissions can be specified on the gsutil cp command line.
573 https://developers.google.com/storage/docs/accesscontrol#applyacls
576 src_path: Fully qualified local path or full gs:// path of the src file.
577 dest_path: Fully qualified local path or full gs:// path of the dest
579 acl: One of the google storage canned_acls to apply.
580 recursive: Whether to copy recursively.
581 skip_symlinks: Skip symbolic links when copying recursively.
584 Return the CommandResult from the run.
587 RunCommandError if the command failed despite retries.
595 acl = self.acl if acl is None else acl
599 with cros_build_lib.ContextManagerStack() as stack:
600 # Write the input into a tempfile if possible. This is needed so that
601 # gsutil can retry failed requests.
602 if src_path == '-' and kwargs.get('input') is not None:
603 f = stack.Add(tempfile.NamedTemporaryFile)
604 f.write(kwargs['input'])
609 cmd += ['--', src_path, dest_path]
611 if not (src_path.startswith(BASE_GS_URL) or
612 dest_path.startswith(BASE_GS_URL)):
613 # Don't retry on local copies.
614 kwargs.setdefault('retries', 0)
616 return self.DoCommand(cmd, **kwargs)
618 # TODO(mtennant): Merge with LS() after it supports returning details.
619 def LSWithDetails(self, path, **kwargs):
620 """Does a detailed directory listing of the given gs path.
623 path: The path to get a listing of.
626 List of tuples, where each tuple is (gs path, file size in bytes integer,
627 file modified time as datetime.datetime object).
629 kwargs['redirect_stdout'] = True
630 result = self.DoCommand(['ls', '-l', '--', path], **kwargs)
632 lines = result.output.splitlines()
634 # Output like the followig is expected:
635 # 99908 2014-03-01T05:50:08Z gs://somebucket/foo/abc
636 # 99908 2014-03-04T01:16:55Z gs://somebucket/foo/def
637 # TOTAL: 2 objects, 199816 bytes (495.36 KB)
639 # The last line is expected to be a summary line. Ignore it.
641 for line in lines[:-1]:
642 match = LS_LA_RE.search(line)
643 size, timestamp, url = (match.group(1), match.group(2), match.group(3))
645 timestamp = datetime.datetime.strptime(timestamp, DATETIME_FORMAT)
648 size = int(size) if size else None
649 url_tuples.append((url, size, timestamp))
653 # TODO(mtennant): Enhance to add details to returned results, such as
654 # size, modified time, generation.
655 def LS(self, path, raw=False, **kwargs):
656 """Does a directory listing of the given gs path.
659 path: The path to get a listing of.
660 raw: Return the raw CommandResult object instead of parsing it.
661 kwargs: See options that DoCommand takes.
664 If raw is False, a list of paths that matched |path|. Might be more
665 than one if a directory or path include wildcards/etc...
666 If raw is True, then the CommandResult object.
668 kwargs['redirect_stdout'] = True
669 if not path.startswith(BASE_GS_URL):
670 # gsutil doesn't support listing a local path, so just run 'ls'.
671 kwargs.pop('retries', None)
672 kwargs.pop('headers', None)
673 result = cros_build_lib.RunCommand(['ls', path], **kwargs)
675 result = self.DoCommand(['ls', '--', path], **kwargs)
680 # TODO: Process resulting lines when given -l/-a.
681 # See http://crbug.com/342918 for more details.
682 return result.output.splitlines()
684 def DU(self, path, **kwargs):
685 """Returns size of an object."""
686 return self.DoCommand(['du', path], redirect_stdout=True, **kwargs)
688 def SetACL(self, upload_url, acl=None):
689 """Set access on a file already in google storage.
692 upload_url: gs:// url that will have acl applied to it.
693 acl: An ACL permissions file or canned ACL.
697 raise GSContextException(
698 "SetAcl invoked w/out a specified acl, nor a default acl.")
701 self.DoCommand(['acl', 'set', acl, upload_url])
703 def Exists(self, path, **kwargs):
704 """Checks whether the given object exists.
707 path: Full gs:// url of the path to check.
710 True if the path exists; otherwise returns False.
713 # Use 'gsutil stat' command to check for existence. It is not
714 # subject to caching behavior of 'gsutil ls', and it only requires
715 # read access to the file, unlike 'gsutil acl get'.
716 self.DoCommand(['stat', path], redirect_stdout=True, **kwargs)
718 # A path that does not exist will result in error output like:
719 # InvalidUriError: Attempt to get key for "gs://foo/bar"
720 # That will result in GSNoSuchKey.
724 def Remove(self, path, ignore_missing=False):
725 """Remove the specified file.
728 path: Full gs:// url of the file to delete.
729 ignore_missing: Whether to suppress errors about missing files.
732 self.DoCommand(['rm', path])
734 if not ignore_missing:
737 def GetGeneration(self, path):
738 """Get the generation and metageneration of the given |path|.
741 A tuple of the generation and metageneration.
744 if res and res.returncode == 0 and res.output is not None:
745 # Search for a header that looks like this:
746 # header: x-goog-generation: 1378856506589000
747 m = re.search(r'header: %s: (\d+)' % name, res.output)
749 return int(m.group(1))
753 res = self.DoCommand(['-d', 'acl', 'get', path],
754 error_code_ok=True, redirect_stdout=True)
756 # If a DoCommand throws an error, 'res' will be None, so _Header(...)
757 # will return 0 in both of the cases below.
760 return (_Header('x-goog-generation'), _Header('x-goog-metageneration'))
  def Counter(self, path):
    """Return a GSCounter object pointing at a |path| in Google Storage.

    Args:
      path: The path to the counter in Google Storage.
    """
    return GSCounter(self, path)
770 def WaitForGsPaths(self, paths, timeout, period=10):
771 """Wait until a list of files exist in GS.
774 paths: The list of files to wait for.
775 timeout: Max seconds to wait for file to appear.
776 period: How often to check for files while waiting.
779 timeout_util.TimeoutError if the timeout is reached.
781 # Copy the list of URIs to wait for, so we don't modify the callers context.
782 pending_paths = paths[:]
784 def _CheckForExistence():
785 pending_paths[:] = [x for x in pending_paths if not self.Exists(x)]
787 def _Retry(_return_value):
788 # Retry, if there are any pending paths left.
791 timeout_util.WaitForSuccess(_Retry, _CheckForExistence,
792 timeout=timeout, period=period)
@contextlib.contextmanager
def TemporaryURL(prefix):
  """Context manager to generate a random URL.

  At the end, the URL will be deleted.

  Args:
    prefix: A string to embed in the generated URL for identification.
  """
  url = '%s/chromite-temp/%s/%s/%s' % (constants.TRASH_BUCKET, prefix,
                                       getpass.getuser(), uuid.uuid1())
  ctx = GSContext()
  # Clear out any stale object at this URL before handing it to the caller.
  ctx.Remove(url, ignore_missing=True)
  try:
    yield url
  finally:
    ctx.Remove(url, ignore_missing=True)