1 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Module containing class for recording metadata about a run."""
7 from __future__ import print_function
import collections
import datetime
import json
import logging
import math
import multiprocessing
import os
import re
import time

from chromite.cbuildbot import archive_lib
from chromite.cbuildbot import cbuildbot_config
from chromite.cbuildbot import constants
from chromite.cbuildbot import results_lib
from chromite.lib import cros_build_lib
from chromite.lib import gs
from chromite.lib import parallel
# Number of parallel processes used when uploading/downloading GS files.
# NOTE(review): this constant is referenced by ReadMetadataURLs and
# MarkBuildsGathered below but its definition was missing; restored here.
MAX_PARALLEL = 40

# Root of the per-target image archive in Google Storage.
ARCHIVE_ROOT = 'gs://chromeos-image-archive/%(target)s'
# NOTE: gsutil 3.42 has a bug where '/' is ignored in this context unless it
# is listed twice. So we list it twice here for now.
METADATA_URL_GLOB = os.path.join(ARCHIVE_ROOT,
                                 'R%(milestone)s**//metadata.json')
# Pointer file naming the most recent master build for a target.
LATEST_URL = os.path.join(ARCHIVE_ROOT, 'LATEST-master')
# Lightweight representation of a single patchset of a gerrit change.
GerritPatchTuple = collections.namedtuple('GerritPatchTuple',
                                          ['gerrit_number', 'patch_number',
                                           'internal'])
# Lightweight representation of a gerrit change (any patchset).
GerritChangeTuple = collections.namedtuple('GerritChangeTuple',
                                           ['gerrit_number', 'internal'])
# An action taken on a CL, as recorded in metadata.
CLActionTuple = collections.namedtuple('CLActionTuple',
                                       ['change', 'action', 'timestamp',
                                        'reason'])
# A CL action annotated with the bot type and build that performed it.
CLActionWithBuildTuple = collections.namedtuple('CLActionWithBuildTuple',
    ['change', 'action', 'timestamp', 'reason', 'bot_type', 'build'])
def GetChangeAsSmallDictionary(change):
  """Returns a small dictionary representation of a gerrit change.

  Args:
    change: A GerritPatch or GerritPatchTuple object.

  Returns:
    A dictionary of the form {'gerrit_number': change.gerrit_number,
                              'patch_number': change.patch_number,
                              'internal': change.internal}
  """
  return {'gerrit_number': change.gerrit_number,
          'patch_number': change.patch_number,
          'internal': change.internal}
def GetCLActionTuple(change, action, timestamp=None, reason=None):
  """Returns a CLActionTuple suitable for recording in metadata or cidb.

  Args:
    change: A GerritPatch or GerritPatchTuple object.
    action: The action taken, should be one of constants.CL_ACTIONS
    timestamp: An integer timestamp such as int(time.time()) at which
               the action was taken. Default: Now.
    reason: Description of the reason the action was taken. Default: ''

  Returns:
    A CLActionTuple whose change member is the small-dict form of |change|.
  """
  return CLActionTuple(
      GetChangeAsSmallDictionary(change),
      action,
      timestamp or int(time.time()),
      reason)
83 class _DummyLock(object):
84 """A Dummy clone of RLock that does nothing."""
85 def acquire(self, blocking=1):
class CBuildbotMetadata(object):
  """Class for recording metadata about a run."""

  def __init__(self, metadata_dict=None, multiprocess_manager=None):
    """Constructor for CBuildbotMetadata.

    Args:
      metadata_dict: Optional dictionary containing initial metadata,
                     as returned by loading metadata from json.
      multiprocess_manager: Optional multiprocess.Manager instance. If
                            supplied, the metadata instance will use
                            multiprocess containers so that its state
                            is correctly synced across processes.
    """
    super(CBuildbotMetadata, self).__init__()
    if multiprocess_manager:
      self._metadata_dict = multiprocess_manager.dict()
      self._cl_action_list = multiprocess_manager.list()
      self._per_board_dict = multiprocess_manager.dict()
      self._subdict_update_lock = multiprocess_manager.RLock()
    else:
      self._metadata_dict = {}
      self._cl_action_list = []
      self._per_board_dict = {}
      # If we are not using a manager, then metadata is not expected to be
      # multiprocess safe. Use a dummy RLock.
      self._subdict_update_lock = _DummyLock()

    if metadata_dict:
      self.UpdateWithDict(metadata_dict)

  @staticmethod
  def FromJSONString(json_string):
    """Construct a CBuildbotMetadata from a json representation.

    Args:
      json_string: A string json representation of a CBuildbotMetadata.

    Returns:
      A CbuildbotMetadata instance.
    """
    return CBuildbotMetadata(json.loads(json_string))

  def UpdateWithDict(self, metadata_dict):
    """Update metadata dictionary with values supplied in |metadata_dict|

    This method is effectively the inverse of GetDict. Existing key-values
    in metadata will be overwritten by those supplied in |metadata_dict|,
    with the exceptions of:
    - the cl_actions list which will be extended with the contents (if any)
      of the supplied dict's cl_actions list.
    - the per-board metadata dict, which will be recursively extended with the
      contents of the supplied dict's board-metadata

    Args:
      metadata_dict: A dictionary of key-value pairs to be added this
                     metadata instance. Keys should be strings, values
                     should be json-able.

    Returns:
      self
    """
    # This is effectively the inverse of the dictionary construction in GetDict,
    # to reconstruct the correct internal representation of a metadata
    # object.
    metadata_dict = metadata_dict.copy()
    cl_action_list = metadata_dict.pop('cl_actions', None)
    per_board_dict = metadata_dict.pop('board-metadata', None)
    self._metadata_dict.update(metadata_dict)
    if cl_action_list:
      self._cl_action_list.extend(cl_action_list)
    if per_board_dict:
      for k, v in per_board_dict.items():
        self.UpdateBoardDictWithDict(k, v)

    return self

  def UpdateBoardDictWithDict(self, board, board_dict):
    """Update the per-board dict for |board| with |board_dict|.

    Note: both |board| and all the keys of |board_dict| must be strings
          that do not contain the character ':'

    Returns:
      self
    """
    # Wrap the per-board key-value pairs as key-value pairs in _per_board_dict.
    # Note -- due to http://bugs.python.org/issue6766 it is not possible to
    # store a multiprocess dict proxy inside another multiprocess dict proxy.
    # That is why we are using this flattened representation of board dicts.
    assert not ':' in board
    # Even if board_dict is {}, ensure that an entry with this board
    # name appears in the flattened dict so GetDict reports the board.
    self._per_board_dict[board + ':'] = None
    for k, v in board_dict.items():
      assert not ':' in k
      self._per_board_dict['%s:%s' % (board, k)] = v

    return self

  def UpdateKeyDictWithDict(self, key, key_metadata_dict):
    """Update metadata for the given key with values supplied in |metadata_dict|

    This method merges the dictionary for the given key with the given key
    metadata dictionary (allowing them to be effectively updated from any
    stage).

    This method is multiprocess safe.

    Args:
      key: The key name (e.g. 'version' or 'status')
      key_metadata_dict: A dictionary of key-value pairs to be added this
                         metadata key. Keys should be strings, values
                         should be json-able.

    Returns:
      self
    """
    with self._subdict_update_lock:
      # If the key already exists, then use its dictionary
      target_dict = self._metadata_dict.setdefault(key, {})
      target_dict.update(key_metadata_dict)
      # Re-assign so a multiprocess dict proxy picks up the change.
      self._metadata_dict[key] = target_dict

    return self

  def GetDict(self):
    """Returns a dictionary representation of metadata."""
    # CL actions are stored in self._cl_action_list instead of
    # in self._metadata_dict['cl_actions'], because _cl_action_list
    # is potentially a multiprocess.list. So, _cl_action_list needs to
    # be copied into a normal list.
    temp = self._metadata_dict.copy()
    temp['cl_actions'] = list(self._cl_action_list)

    # Similarly, the per-board dicts are stored in a flattened form in
    # _per_board_dict. Un-flatten into nested dict.
    per_board_dict = {}
    for k, v in self._per_board_dict.items():
      board, key = k.split(':')
      board_dict = per_board_dict.setdefault(board, {})
      # An empty |key| is the placeholder written by UpdateBoardDictWithDict
      # to record the board's existence; it carries no value.
      if key:
        board_dict[key] = v

    temp['board-metadata'] = per_board_dict
    return temp

  # TODO(akeshet): crbug.com/406522 special case cl_actions and board-metadata
  # so that GetValue can work with them as well.
  def GetValue(self, key):
    """Get an item from the metadata dictionary.

    This method is in most cases an inexpensive equivalent to:
    GetDict()[key]

    However, it cannot be used for items like 'cl_actions' or 'board-metadata'
    which are not stored directly in the metadata dictionary.
    """
    return self._metadata_dict[key]

  def GetJSON(self):
    """Return a JSON string representation of metadata."""
    return json.dumps(self.GetDict())

  def RecordCLAction(self, change, action, timestamp=None, reason=''):
    """Record an action that was taken on a CL, to the metadata.

    Args:
      change: A GerritPatch object for the change acted on.
      action: The action taken, should be one of constants.CL_ACTIONS
      timestamp: An integer timestamp such as int(time.time()) at which
                 the action was taken. Default: Now.
      reason: Description of the reason the action was taken. Default: ''

    Returns:
      self
    """
    self._cl_action_list.append(
        GetCLActionTuple(change, action, timestamp, reason))
    return self
def GetReportMetadataDict(builder_run, get_changes_from_pool,
                          get_statuses_from_slaves, config=None, stage=None,
                          final_status=None, sync_instance=None,
                          completion_instance=None):
  """Return a metadata dictionary summarizing a build.

  This method replaces code that used to exist in the ArchivingStageMixin
  class from cbuildbot_stage. It contains all the Report-stage-time
  metadata construction logic. The logic here is intended to be gradually
  refactored out so that the metadata is constructed gradually by the
  stages that are responsible for pieces of data, as they run.

  Args:
    builder_run: BuilderRun instance for this run.
    get_changes_from_pool: If True, information about patches in the
                           sync_instance.pool will be recorded.
    get_statuses_from_slaves: If True, status information of slave
                              builders will be recorded.
    config: The build config for this run. Defaults to self._run.config.
    stage: The stage name that this metadata file is being uploaded for.
    final_status: Whether the build passed or failed. If None, the build
                  will be treated as still running.
    sync_instance: The stage instance that was used for syncing the source
                   code. This should be a derivative of SyncStage. If None,
                   the list of commit queue patches will not be included
                   in the metadata.
    completion_instance: The stage instance that was used to wait for slave
                         completion. Used to add slave build information to
                         master builder's metadata. If None, no such status
                         information will be included. It not None, this
                         should be a derivative of
                         MasterSlaveSyncCompletionStage.

  Returns:
    A metadata dictionary suitable to be json-serialized.
  """
  config = config or builder_run.config

  start_time = results_lib.Results.start_time
  current_time = datetime.datetime.now()
  start_time_stamp = cros_build_lib.UserDateTimeFormat(timeval=start_time)
  current_time_stamp = cros_build_lib.UserDateTimeFormat(timeval=current_time)
  duration = '%s' % (current_time - start_time,)

  metadata = {
      'status': {
          'current-time': current_time_stamp,
          'status': final_status if final_status else 'running',
          'summary': stage or '',
      },
      'time': {
          'start': start_time_stamp,
          'finish': current_time_stamp if final_status else '',
          'duration': duration,
      },
  }

  metadata['results'] = []
  for entry in results_lib.Results.Get():
    timestr = datetime.timedelta(seconds=math.ceil(entry.time))
    if entry.result in results_lib.Results.NON_FAILURE_TYPES:
      status = constants.FINAL_STATUS_PASSED
    else:
      status = constants.FINAL_STATUS_FAILED
    metadata['results'].append({
        'name': entry.name,
        'status': status,
        # The result might be a custom exception.
        'summary': str(entry.result),
        'duration': '%s' % timestr,
        'board': entry.board,
        'description': entry.description,
        'log': builder_run.ConstructDashboardURL(stage=entry.name),
    })

  if get_changes_from_pool:
    changes = []
    pool = sync_instance.pool
    for change in pool.changes:
      details = {'gerrit_number': change.gerrit_number,
                 'patch_number': change.patch_number,
                 'internal': change.internal}
      changes.append(details)
    metadata['changes'] = changes

  # If we were a CQ master, then include a summary of the status of slave cq
  # builders in metadata
  if get_statuses_from_slaves:
    statuses = completion_instance.GetSlaveStatuses()
    if not statuses:
      logging.warning('completion_instance did not have any statuses '
                      'to report. Will not add slave status to metadata.')

    metadata['slave_targets'] = {}
    for builder, status in statuses.iteritems():
      metadata['slave_targets'][builder] = status.AsFlatDict()

  return metadata
# The graphite graphs use seconds since epoch start as time value.
# Naive datetime; used as the subtraction base for epoch_time_seconds.
EPOCH_START = datetime.datetime(1970, 1, 1)
# Formats we like for output.
NICE_DATE_FORMAT = '%Y/%m/%d'  # e.g. 2014/02/14
NICE_TIME_FORMAT = '%H:%M:%S'  # e.g. 17:00:49
NICE_DATETIME_FORMAT = NICE_DATE_FORMAT + ' ' + NICE_TIME_FORMAT
# TODO(akeshet): Merge this class into CBuildbotMetadata.
class BuildData(object):
  """Class for examining metadata from a prior run.

  The raw metadata dict can be accessed at self.metadata_dict or via []
  and get() on a BuildData object. Some values from metadata_dict are
  also surfaced through the following list of supported properties:

  build_number
  stages
  slaves
  chromeos_version
  chrome_version
  bot_config
  status
  start_datetime
  finish_datetime
  start_date_str
  start_time_str
  start_datetime_str
  finish_date_str
  finish_time_str
  finish_datetime_str
  failure_message
  runtime_seconds
  runtime_minutes
  epoch_time_seconds
  patches
  count_changes
  run_date
  sheets_version
  carbon_version
  """

  __slots__ = (
      'gathered_dict',  # Dict with gathered data (sheets/carbon version).
      'gathered_url',   # URL to metadata.json.gathered location in GS.
      'metadata_dict',  # Dict representing metadata data from JSON.
      'metadata_url',   # URL to metadata.json location in GS.
  )

  # Regexp for parsing datetimes as stored in metadata.json. Example text:
  # Fri, 14 Feb 2014 17:00:49 -0800 (PST)
  DATETIME_RE = re.compile(r'^(.+)\s-\d\d\d\d\s\(P\wT\)$')

  SHEETS_VER_KEY = 'sheets_version'
  CARBON_VER_KEY = 'carbon_version'

  @staticmethod
  def ReadMetadataURLs(urls, gs_ctx=None, exclude_running=True,
                       get_sheets_version=False):
    """Read a list of metadata.json URLs and return BuildData objects.

    Args:
      urls: List of metadata.json GS URLs.
      gs_ctx: A GSContext object to use. If not provided gs.GSContext will
        be called to get a GSContext to use.
      exclude_running: If True the metadata for builds that are still running
        will be skipped.
      get_sheets_version: Whether to try to figure out the last sheets version
        and the last carbon version that was gathered. This requires an extra
        gsutil request and is only needed if you are writing the metadata to
        to the Google Sheets spreadsheet.

    Returns:
      List of BuildData objects.
    """
    gs_ctx = gs_ctx or gs.GSContext()
    cros_build_lib.Info('Reading %d metadata URLs using %d processes now.',
                        len(urls), MAX_PARALLEL)

    build_data_per_url = {}
    def _ReadMetadataURL(url):
      # Read the metadata.json URL and parse json into a dict.
      metadata_dict = json.loads(gs_ctx.Cat(url, print_cmd=False))

      # Read the file next to url which indicates whether the metadata has
      # been gathered before, and with what stats version.
      if get_sheets_version:
        gathered_dict = {}
        gathered_url = url + '.gathered'
        if gs_ctx.Exists(gathered_url, print_cmd=False):
          gathered_dict = json.loads(gs_ctx.Cat(gathered_url,
                                                print_cmd=False))

        sheets_version = gathered_dict.get(BuildData.SHEETS_VER_KEY)
        carbon_version = gathered_dict.get(BuildData.CARBON_VER_KEY)
      else:
        sheets_version, carbon_version = None, None

      bd = BuildData(url, metadata_dict, sheets_version=sheets_version,
                     carbon_version=carbon_version)

      if bd.build_number is None:
        cros_build_lib.Warning('Metadata at %s was missing build number.',
                               url)
        m = re.match(r'.*-b([0-9]*)/.*', url)
        if m:
          inferred_number = int(m.groups()[0])
          cros_build_lib.Warning('Inferred build number %d from metadata url.',
                                 inferred_number)
          bd.metadata_dict['build-number'] = inferred_number
      if not (sheets_version is None and carbon_version is None):
        cros_build_lib.Debug('Read %s:\n'
                             '  build_number=%d, sheets v%d, carbon v%d', url,
                             bd.build_number, sheets_version, carbon_version)
      else:
        cros_build_lib.Debug('Read %s:\n  build_number=%d, ungathered',
                             url, bd.build_number)

      build_data_per_url[url] = bd

    with multiprocessing.Manager() as manager:
      build_data_per_url = manager.dict()
      parallel.RunTasksInProcessPool(_ReadMetadataURL, [[url] for url in urls],
                                     processes=MAX_PARALLEL)
      builds = [build_data_per_url[url] for url in urls]

    if exclude_running:
      builds = [b for b in builds if b.status != 'running']

    return builds

  @staticmethod
  def MarkBuildsGathered(builds, sheets_version, carbon_version, gs_ctx=None):
    """Mark specified |builds| as processed for the given stats versions.

    Args:
      builds: List of BuildData objects.
      sheets_version: The Google Sheets version these builds are now processed
        for.
      carbon_version: The Carbon/Graphite version these builds are now
        processed for.
      gs_ctx: A GSContext object to use, if set.
    """
    gs_ctx = gs_ctx or gs.GSContext()

    # Filter for builds that were not already on these versions.
    builds = [b for b in builds
              if b.sheets_version != sheets_version or
              b.carbon_version != carbon_version]

    log_ver_str = 'Sheets v%d, Carbon v%d' % (sheets_version, carbon_version)
    cros_build_lib.Info('Marking %d builds gathered (for %s) using %d'
                        ' processes now.', len(builds), log_ver_str,
                        MAX_PARALLEL)

    def _MarkGathered(build):
      build.MarkGathered(sheets_version, carbon_version)
      json_text = json.dumps(build.gathered_dict.copy())
      gs_ctx.Copy('-', build.gathered_url, input=json_text, print_cmd=False)
      cros_build_lib.Debug('Marked build_number %d processed for %s.',
                           build.build_number, log_ver_str)

    inputs = [[build] for build in builds]
    parallel.RunTasksInProcessPool(_MarkGathered, inputs,
                                   processes=MAX_PARALLEL)

  def __init__(self, metadata_url, metadata_dict, carbon_version=None,
               sheets_version=None):
    self.metadata_url = metadata_url
    self.metadata_dict = metadata_dict

    # If a stats version is not specified default to -1 so that the initial
    # version (version 0) will be considered "newer".
    self.gathered_url = metadata_url + '.gathered'
    self.gathered_dict = {
        self.CARBON_VER_KEY: -1 if carbon_version is None else carbon_version,
        self.SHEETS_VER_KEY: -1 if sheets_version is None else sheets_version,
    }

  def MarkGathered(self, sheets_version, carbon_version):
    """Mark this build as processed for the given stats versions."""
    self.gathered_dict[self.SHEETS_VER_KEY] = sheets_version
    self.gathered_dict[self.CARBON_VER_KEY] = carbon_version

  def __getitem__(self, key):
    """Relay dict-like access to self.metadata_dict."""
    return self.metadata_dict[key]

  def get(self, key, default=None):
    """Relay dict-like access to self.metadata_dict."""
    return self.metadata_dict.get(key, default)

  @property
  def sheets_version(self):
    return self.gathered_dict[self.SHEETS_VER_KEY]

  @property
  def carbon_version(self):
    return self.gathered_dict[self.CARBON_VER_KEY]

  @property
  def build_number(self):
    try:
      return int(self['build-number'])
    except KeyError:
      # Callers (e.g. ReadMetadataURLs) treat None as "not recorded".
      return None

  @property
  def stages(self):
    return self['results']

  @property
  def slaves(self):
    return self.get('slave_targets', {})

  @property
  def chromeos_version(self):
    try:
      return self['version']['full']
    except KeyError:
      return None

  @property
  def chrome_version(self):
    try:
      return self['version']['chrome']
    except KeyError:
      return None

  @property
  def bot_config(self):
    return self['bot-config']

  @property
  def status(self):
    return self.get('status', {}).get('status', None)

  @classmethod
  def _ToDatetime(cls, time_str):
    # Strip the timezone suffix (e.g. "-0800 (PST)") before parsing.
    match = cls.DATETIME_RE.search(time_str)
    if match:
      return datetime.datetime.strptime(match.group(1), '%a, %d %b %Y %H:%M:%S')

    raise ValueError('Unexpected metadata datetime format: %s' % time_str)

  @property
  def start_datetime(self):
    return self._ToDatetime(self['time']['start'])

  @property
  def finish_datetime(self):
    return self._ToDatetime(self['time']['finish'])

  @property
  def start_date_str(self):
    return self.start_datetime.strftime(NICE_DATE_FORMAT)

  @property
  def start_time_str(self):
    return self.start_datetime.strftime(NICE_TIME_FORMAT)

  @property
  def start_datetime_str(self):
    return self.start_datetime.strftime(NICE_DATETIME_FORMAT)

  @property
  def finish_date_str(self):
    return self.finish_datetime.strftime(NICE_DATE_FORMAT)

  @property
  def finish_time_str(self):
    return self.finish_datetime.strftime(NICE_TIME_FORMAT)

  @property
  def finish_datetime_str(self):
    return self.finish_datetime.strftime(NICE_DATETIME_FORMAT)

  @property
  def failure_message(self):
    message_list = []
    mapping = {}
    # Dedup the messages from the slaves.
    for slave in self.GetFailedSlaves():
      message = self.slaves[slave]['reason']
      mapping[message] = mapping.get(message, []) + [slave]

    for message, slaves in mapping.iteritems():
      if len(slaves) > 6:
        # Do not print all the names when there are more than 6 (an
        # arbitrary number) builders.
        message_list.append('%d builders: %s' % (len(slaves), message))
      else:
        message_list.append('%s: %s' % (','.join(slaves), message))

    return ' | '.join(message_list)

  def GetChangelistsStr(self):
    """Return a space-separated string of CLs, '*'-prefixed if internal."""
    cl_strs = []
    for cl_dict in self.metadata_dict['changes']:
      cl_strs.append('%s%s:%s' %
                     ('*' if cl_dict['internal'] == 'true' else '',
                      cl_dict['gerrit_number'], cl_dict['patch_number']))

    return ' '.join(cl_strs)

  def GetFailedStages(self, with_urls=False):
    """Get names of all failed stages, optionally with URLs for each.

    Args:
      with_urls: If True then also return URLs. See Returns.

    Returns:
      If with_urls is False, return list of stage names. Otherwise, return list
      of tuples (stage name, stage URL).
    """
    def _Failed(stage):
      # This can be more discerning in the future, such as for optional stages.
      return stage['status'] == 'failed'

    if with_urls:
      # The "log" url includes "/logs/stdio" on the end. Strip that off.
      return [(s['name'], os.path.dirname(os.path.dirname(s['log'])))
              for s in self.stages if _Failed(s)]

    return [s['name'] for s in self.stages if _Failed(s)]

  def GetFailedSlaves(self, with_urls=False):
    """Get names of all failed slaves, optionally with dashboard URLs."""
    def _Failed(slave):
      return slave['status'] == 'fail'

    # Older metadata has no slave_targets entry.
    slaves = self.slaves
    if with_urls:
      return [(name, slave['dashboard_url'])
              for name, slave in slaves.iteritems() if _Failed(slave)]

    return [name for name, slave in slaves.iteritems() if _Failed(slave)]

  @property
  def runtime_seconds(self):
    return (self.finish_datetime - self.start_datetime).seconds

  @property
  def runtime_minutes(self):
    return self.runtime_seconds / 60

  @property
  def epoch_time_seconds(self):
    # End time seconds since 1/1/1970, for some reason.
    return int((self.finish_datetime - EPOCH_START).total_seconds())

  @property
  def patches(self):
    return [GerritPatchTuple(gerrit_number=change['gerrit_number'],
                             patch_number=change['patch_number'],
                             internal=change['internal'])
            for change in self.metadata_dict.get('changes', [])]

  @property
  def count_changes(self):
    if not self.metadata_dict.get('changes', None):
      return 0

    return len(self.metadata_dict['changes'])

  @property
  def run_date(self):
    return self.finish_datetime.strftime('%d.%m.%Y')

  def Passed(self):
    """Return True if this represents a successful run."""
    return 'passed' == self.metadata_dict['status']['status'].strip()
def FindLatestFullVersion(builder, version):
  """Find the latest full version number built by |builder| on |version|.

  Args:
    builder: Builder to load information from. E.g. daisy-release
    version: Version that we are interested in. E.g. 5602.0.0

  Returns:
    The latest corresponding full version number, including milestone prefix.
    E.g. R35-5602.0.0. For some builders, this may also include a -rcN or
    -bNNNN suffix.  Returns None if no LATEST file exists for |version|.
  """
  gs_ctx = gs.GSContext()
  config = cbuildbot_config.config[builder]
  base_url = archive_lib.GetBaseUploadURI(config)
  latest_file_url = os.path.join(base_url, 'LATEST-%s' % version)
  try:
    return gs_ctx.Cat(latest_file_url).strip()
  except gs.GSNoSuchKey:
    return None
def GetBuildMetadata(builder, full_version):
  """Fetch the metadata.json object for |builder| and |full_version|.

  Args:
    builder: Builder to load information from. E.g. daisy-release
    full_version: Version that we are interested in, including milestone
      prefix. E.g. R35-5602.0.0. For some builders, this may also include a
      -rcN or -bNNNN suffix.

  Returns:
    A newly created CBuildbotMetadata object with the metadata from the given
    |builder| and |full_version|, or None if no metadata.json exists there.
  """
  gs_ctx = gs.GSContext()
  config = cbuildbot_config.config[builder]
  base_url = archive_lib.GetBaseUploadURI(config)
  try:
    archive_url = os.path.join(base_url, full_version)
    metadata_url = os.path.join(archive_url, constants.METADATA_JSON)
    output = gs_ctx.Cat(metadata_url)
    return CBuildbotMetadata(json.loads(output))
  except gs.GSNoSuchKey:
    return None
class MetadataException(Exception):
  """Base class for exceptions raised by this module."""
class GetMilestoneError(MetadataException):
  """Raised when the latest milestone cannot be determined."""
def GetLatestMilestone():
  """Get the latest milestone from CQ Master LATEST-master file.

  Returns:
    The milestone number as an int, e.g. 35.

  Raises:
    GetMilestoneError: If the LATEST file is missing in GS.
  """
  # Use CQ Master target to get latest milestone.
  latest_url = LATEST_URL % {'target': constants.CQ_MASTER}
  gs_ctx = gs.GSContext()

  try:
    cros_build_lib.Info('Getting latest milestone from %s', latest_url)
    content = gs_ctx.Cat(latest_url).strip()

    # Expected syntax is like the following: "R35-1234.5.6-rc7".
    assert content.startswith('R')
    milestone = content.split('-')[0][1:]
    cros_build_lib.Info('Latest milestone determined to be: %s', milestone)
    return int(milestone)

  except gs.GSNoSuchKey:
    raise GetMilestoneError('LATEST file missing: %s' % latest_url)
def GetMetadataURLsSince(target, start_date):
  """Get metadata.json URLs for |target| since |start_date|.

  The modified time of the GS files is used to compare with start_date, so
  the completion date of the builder run is what is important here.

  Args:
    target: Builder target name.
    start_date: datetime.date object.

  Returns:
    Metadata urls for runs found.
  """
  ret = []
  milestone = GetLatestMilestone()
  gs_ctx = gs.GSContext()
  while True:
    base_url = METADATA_URL_GLOB % {'target': target, 'milestone': milestone}
    cros_build_lib.Info('Getting %s builds for R%d from "%s"',
                        target, milestone, base_url)

    try:
      # Get GS URLs. We want the datetimes to quickly know when we are done
      # collecting URLs.
      urls = gs_ctx.List(base_url, details=True)
    except gs.GSNoSuchKey:
      # We ran out of metadata to collect. Stop searching back in time.
      cros_build_lib.Info('No %s builds found for $%d. I will not continue'
                          ' search to older milestones.', target, milestone)
      break

    # Sort by creation time, newest first.
    urls = sorted(urls, key=lambda x: x.creation_time, reverse=True)

    # See if we have gone far enough back by checking datetime of oldest URL
    # in the current batch.
    if urls[-1].creation_time.date() < start_date:
      # We want a subset of these URLs, then we are done.
      ret.extend([x.url for x in urls if x.creation_time.date() >= start_date])
      break
    else:
      # Accept all these URLs, then continue on to the next milestone.
      ret.extend([x.url for x in urls])
      milestone -= 1
      cros_build_lib.Info('Continuing on to R%d.', milestone)

  return ret