+
class SubunitWriter(ReportWriter):
    """Reporter to output a subunit stream."""

    def printout(self):
        """Emit every recorded testcase to stdout as a subunit stream.

        Wraps each gtester result in a GTestCase and replays it through a
        subunit TestProtocolClient, so downstream subunit consumers see a
        normal pyunit-style run.
        """
        stream = subunit.TestProtocolClient(sys.stdout)
        for program in self.binaries:
            for case in program.testcases:
                GTestCase(case, program).run(stream)
+
+
class GTestCase(object):
    """A representation of a gtester test result as a pyunit TestCase."""

    def __init__(self, case, binary):
        """Create a GTestCase for case 'case' from binary program 'binary'."""
        self._case = case
        self._binary = binary
        # the name of the case - e.g. /dbusmenu/glib/objects/menuitem/props_boolstr
        self._path = attribute_as_text(self._case, 'path')

    def id(self):
        """What test is this? Returns the gtester path for the testcase."""
        return self._path

    def _get_details(self):
        """Calculate a details dict for the test - attachments etc.

        Returns a dict of name -> Content. Always includes the binary's
        filename and random seed; failures additionally carry an 'error'
        attachment, successes may carry a 'performance' attachment.
        """
        details = {}
        details['filename'] = Content(mime_utf8, lambda: [self._binary.file])
        details['random_seed'] = Content(
            mime_utf8, lambda: [self._binary.random_seed])
        # Hoist: the original evaluated _get_outcome() once per branch.
        outcome = self._get_outcome()
        if outcome == 'addFailure':
            # Extract the error details. Skips have no details because its not
            # skip like unittest does, instead the runner just bypasses N test.
            txt = self._error_text(self._case)
            details['error'] = Content(mime_utf8, lambda: [txt])
        if outcome == 'addSuccess':
            # Successful tests may have performance metrics.
            perflist = list_children(self._case, 'performance')
            if perflist:
                presults = []
                for perf in perflist:
                    # NOTE: the original also parsed the 'maximize' attribute
                    # into an unused local; only 'minimize' affects the label.
                    pmin = bool(int(attribute_as_text(perf, 'minimize')))
                    pval = float(attribute_as_text(perf, 'value'))
                    txt = node_as_text(perf)
                    # 'x if cond else y' replaces the fragile 'and/or' idiom
                    # (safe here only because both labels are truthy).
                    txt = ('Performance('
                           + ('minimized' if pmin else 'maximized')
                           + '): ' + txt.strip() + '\n')
                    presults.append((pval, txt))
                # Sort attachments by metric value for stable output.
                presults.sort()
                perf_details = [e[1] for e in presults]
                details['performance'] = Content(
                    mime_utf8, lambda: perf_details)
        return details

    def _get_outcome(self):
        """Map the gtester result onto a pyunit TestResult method name.

        Returns one of 'addSkip', 'addSuccess' or 'addFailure'.
        """
        # Appending '0' makes int() safe when 'skipped' is absent/empty:
        # '' -> '0' -> falsy, '1' -> '10' -> truthy.
        if int(attribute_as_text(self._case, 'skipped') + '0'):
            return 'addSkip'
        outcome = attribute_as_text(self._case, 'result', 'status')
        return 'addSuccess' if outcome == 'success' else 'addFailure'

    def run(self, result):
        """Replay this testcase into the pyunit TestResult 'result'."""
        time = datetime.datetime.utcnow().replace(tzinfo=iso8601.Utc())
        result.time(time)
        result.startTest(self)
        try:
            outcome = self._get_outcome()
            details = self._get_details()
            # Only provide a duration IFF outcome == 'addSuccess' - the main
            # parser claims bogus results otherwise: in that case emit time as
            # zero perhaps.
            if outcome == 'addSuccess':
                # Duration is presumably reported in seconds - TODO confirm;
                # scaled to microseconds for the timedelta constructor.
                duration = float(node_as_text(self._case, 'duration'))
                duration = duration * 1000000
                timedelta = datetime.timedelta(0, 0, duration)
                time = time + timedelta
                result.time(time)
            getattr(result, outcome)(self, details=details)
        finally:
            result.stopTest(self)
+
+
+