from glob import glob
import xml.etree.ElementTree as ET
import hashlib
+import yaml
from pymongo import MongoClient
-DATABASE = 'memcheck'
+PRODUCT_NAME = 'dldt' # product name from build manifest
+DATABASE = 'memcheck' # database name for memcheck results
RE_GTEST_MODEL_XML = re.compile(r'<model[^>]*>')
RE_GTEST_CUR_MEASURE = re.compile(
    r'Current values of virtual memory consumption')
        os.path.join(os.path.dirname(getsourcefile(lambda: 0)), relative_path))
+def metadata_from_manifest(manifest):
+ """ Extract commit metadata for memcheck record from manifest
+ """
+    with open(manifest, 'r') as manifest_file:
+        manifest = yaml.safe_load(manifest_file)
+    repo_trigger = next(
+        repo for repo in manifest['components'][PRODUCT_NAME]['repository'] if repo['trigger'])
+    # parse OS name/version from product_type, which is expected to split into
+    # five '_' parts, e.g. '<vendor>_<platform>_ubuntu_18_04' (illustrative)
+    product_type_str = manifest['components'][PRODUCT_NAME]['product_type']
+    product_type = product_type_str.split('_')
+    if len(product_type) != 5 or product_type[2] != 'ubuntu':
+        logging.error('Product type %s is not supported', product_type_str)
+        return {}
+    return {
+        'os_name': product_type[2],
+        'os_version': [product_type[3], product_type[4]],
+        'commit_sha': repo_trigger['revision'],
+        'commit_date': repo_trigger['commit_time'],
+        'repo_url': repo_trigger['url'],
+        'target_branch': repo_trigger['target_branch'],
+        'event_type': manifest['components'][PRODUCT_NAME]['build_event'].lower(),
+    }
+
+
def parse_memcheck_log(log_path):
    """ Parse memcheck log
    """
    collection = client[DATABASE][db_collection]
    result = []
    for record in records:
-        query = dict((key, record[key]) for key in similarity)
-        query['commit_date'] = {'$lt': record['commit_date']}
-        pipeline = [
-            {'$match': query},
-            {'$addFields': {'commit_date': {'$dateFromString': {'dateString': '$commit_date'}}}},
-            {'$sort': {'commit_date': -1}},
-            {'$limit': max_items},
-            {'$sort': {'commit_date': 1}},
-        ]
-        items = list(collection.aggregate(pipeline)) + [record]
+        items = []
+        try:
+            query = dict((key, record[key]) for key in similarity)
+            query['commit_date'] = {'$lt': record['commit_date']}
+            # fetch up to max_items records older than this one (newest first),
+            # then re-sort them chronologically for the timeline
+            pipeline = [
+                {'$match': query},
+                {'$addFields': {
+                    'commit_date': {'$dateFromString': {'dateString': '$commit_date'}}}},
+                {'$sort': {'commit_date': -1}},
+                {'$limit': max_items},
+                {'$sort': {'commit_date': 1}},
+            ]
+            items += list(collection.aggregate(pipeline))
+        except KeyError:
+            pass  # keep only the current record if the timeline cannot be built
+        items += [record]
        timeline = _transpose_dicts(items, template=record)
        result += [timeline]
    return result
--- /dev/null
+#!/usr/bin/env python3
+# Copyright (C) 2020 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+#
+"""
+This script runs memcheck tests isolated with the help of gtest_parallel. It can
+upload memory measurement results to a database and generate reports.
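+
+Example invocation (illustrative; the test binary, URL, and filter are placeholders):
+
+    run_memcheck.py --output_dir logs \
+        --db_url mongodb://localhost:27017 --db_collection commit \
+        --timeline_report memcheck.html \
+        ./MemCheckTests -- --gtest_filter=*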
+"""
+
+import argparse
+from glob import glob
+import json
+import logging
+import os
+import subprocess
+import sys
+
+from memcheck_upload import create_memcheck_records, \
+                            upload_memcheck_records, \
+                            create_memcheck_report, \
+                            metadata_from_manifest
+
+DATABASE = 'memcheck'
+COLLECTIONS = ["commit", "nightly", "weekly"]
+
+
+def run(args, log=None, verbose=True):
+    """ Run command and stream its output through the log
+    """
+    if log is None:
+        log = logging.getLogger('run_memcheck')
+    log_out = log.info if verbose else log.debug
+
+    log.info(f'========== cmd: {" ".join(args)}')  # pylint: disable=logging-format-interpolation
+
+    proc = subprocess.Popen(args,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.STDOUT,
+                            encoding='utf-8',
+                            universal_newlines=True)
+    output = []
+    for line in iter(proc.stdout.readline, ''):
+        log_out(line.strip('\n'))
+        output.append(line)
+    outs = proc.communicate()[0]
+
+    if outs:
+        log_out(outs.strip('\n'))
+        output.append(outs)
+    log.info('========== Completed. Exit code: %d', proc.returncode)
+    return proc.returncode, ''.join(output)
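+# run() usage (illustrative):
+#   returncode, output = run([sys.executable, '--version'])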
+
+
+def json_load(path_or_string):
+    """ Load json as file or as string
+    """
+    if os.path.isfile(path_or_string):
+        with open(path_or_string, 'r') as json_fp:
+            return json.load(json_fp)
+    else:
+        return json.loads(path_or_string)
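+# json_load() usage (illustrative): json_load('{"commit_sha": "abc123"}')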
+
+
+def main():
+    """Main entry point.
+    """
+    # remove additional args (arguments after --)
+    binary_args = []
+    for idx, arg in enumerate(sys.argv):
+        if arg == '--':
+            binary_args = sys.argv[idx+1:]
+            sys.argv = sys.argv[:idx]
+            break
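+    # e.g. (illustrative) `run_memcheck.py -d logs ./MemCheckTests -- --gtest_filter=*`
+    # leaves ['run_memcheck.py', '-d', 'logs', './MemCheckTests'] in sys.argv
+    # and collects ['--gtest_filter=*'] into binary_args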
+
+    init_parser = argparse.ArgumentParser(add_help=False)
+    init_parser.add_argument('--timeline_report',
+                             help='create timeline HTML report with the given file name')
+    init_parser.add_argument('--upload', action="store_true",
+                             help='upload results to the database')
+    args = init_parser.parse_known_args()[0]
+
+    parser = argparse.ArgumentParser(
+        description='Run memcheck tests',
+        usage='%(prog)s [options] binary -- [additional args]',
+        parents=[init_parser])
+    parser.add_argument('binary', help='test binary to execute')
+    parser.add_argument('--gtest_parallel', help='path to gtest-parallel to use',
+                        default='gtest_parallel')
+    parser.add_argument('-d', '--output_dir',
+                        required=args.timeline_report or args.upload,
+                        help='output directory for test logs')
+    parser.add_argument('-w', '--workers', help='number of gtest-parallel workers to spawn')
+
+    parser.add_argument('--db_url',
+                        required=args.timeline_report or args.upload,
+                        help='MongoDB URL in a form "mongodb://server:port"')
+    parser.add_argument('--db_collection',
+                        required=args.timeline_report or args.upload,
+                        help=f'use collection name in {DATABASE} database',
+                        choices=COLLECTIONS)
+    parser.add_argument('--manifest',
+                        help='extract commit information from build manifest')
+    parser.add_argument('--metadata',
+                        help='add extra commit information, JSON formatted')
+    parser.add_argument('--strip_log_path',
+                        metavar='REMOVE[,REPLACE]',
+                        default='',
+                        help='remove or replace parts of log path')
+
+    args = parser.parse_args()
+
+    logging.basicConfig(format="{file} %(levelname)s: %(message)s".format(
+        file=os.path.basename(__file__)), level=logging.INFO, stream=sys.stdout)
+
+    if args.output_dir:
+        if not os.path.exists(args.output_dir):
+            os.makedirs(args.output_dir)
+        elif list(glob(os.path.join(args.output_dir, '**', '*.log'), recursive=True)):
+            logging.error(
+                'Output directory %s already has test logs. '
+                'Please specify an empty directory for output logs',
+                args.output_dir)
+            sys.exit(1)
+
+    returncode, _ = run([sys.executable, args.gtest_parallel] +
+                        (['--output_dir', args.output_dir] if args.output_dir else []) +
+                        (['--workers', args.workers] if args.workers else []) +
+                        [args.binary] +
+                        ['--'] + binary_args)
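+    # the composed command looks like (illustrative):
+    #   python gtest_parallel --output_dir logs --workers 8 ./MemCheckTests -- <binary args>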
+
+    if args.upload or args.timeline_report:
+        # prepare commit information
+        append = {}
+        if args.manifest:
+            append.update(metadata_from_manifest(args.manifest))
+        if args.metadata:
+            append.update(json_load(args.metadata))
+        # prepare memcheck records from logs
+        logs = list(glob(os.path.join(args.output_dir, '**', '*.log'), recursive=True))
+        # --strip_log_path holds REMOVE[,REPLACE]: REMOVE is cut from every log
+        # path and substituted with REPLACE (empty when no REPLACE is given)
+        strip = args.strip_log_path.split(',') + ['']
+        records = create_memcheck_records(logs, strip[1], strip[0], append=append)
+        logging.info('Prepared %d records', len(records))
+        if len(records) != len(logs):
+            logging.warning('Skipped %d logs of %d', len(logs) - len(records), len(logs))
+
+        # upload
+        if args.upload:
+            if records:
+                upload_memcheck_records(records, args.db_url, args.db_collection)
+                logging.info('Uploaded to %s/%s.%s', args.db_url, DATABASE, args.db_collection)
+            else:
+                logging.warning('No records to upload')
+
+        # create timeline report
+        if args.timeline_report:
+            create_memcheck_report(records, args.db_url, args.db_collection, args.timeline_report)
+            logging.info('Created memcheck report %s', args.timeline_report)
+    sys.exit(returncode)
+
+
+if __name__ == "__main__":
+    main()