import re
from collections import defaultdict
-from StringIO import StringIO
+from io import StringIO
from multiprocessing.pool import ThreadPool
from functools import partial
def check_target_prj(submissions):
"""Check if target projects are the same for all submissions"""
result = defaultdict(list)
- for submission, data in submissions.iteritems():
+ for submission, data in submissions.items():
result[data['meta']['obs_target_prj']].append(submission)
if len(result) > 1:
message = '\n'.join('%s: %s' % (project, ' '.join(subms)) \
- for project, subms in result.iteritems())
+ for project, subms in result.items())
raise RepaException('Target projects differ:\n%s\n' % message)
def check_build_results(bresults):
"""Check if build targets are published."""
for subm, _, results in bresults:
- for target, res in results.iteritems():
+ for target, res in results.items():
if res['state'] != 'published' or res['code'] != 'published':
if res['packages']:
raise RepaException("%s: target %s is not published yet" %
"""
binaries = defaultdict(dict)
result = set(submissions.keys())
- for submission, data in sorted(submissions.iteritems()):
+ for submission, data in sorted(submissions.items()):
pkgs = list(obs.get_binary_packages(data['project']))
# check if submission has binary packages
for repo, bins in pkgs:
# check if submissions have common packages
- for subm, info in binaries.iteritems():
+ for subm, info in binaries.items():
if repo in info:
common = set(info[repo]).intersection(bins)
if common and noaggregate:
common = set(pkg for pkg in common \
if not re.match(noaggregate, pkg))
if common:
- print '%s and %s have %d common packages,' \
+ print('%s and %s have %d common packages,' \
' skipping %s' % (subm, submission,
- len(common), submission)
+ len(common), submission))
if submission in result:
result.remove(submission)
break
if processes > 1:
pool = ThreadPool(processes=processes)
for subm, prj, results in bresults:
- for res in results.itervalues():
+ for res in results.values():
for pkg, state in res['packages']:
if state == 'succeeded' and pkg not in aggregated:
if processes > 1:
callback=callback)
else:
obs.aggregate_package(prj, pkg, gproject, pkg)
- print 'aggregated %s/%s' % (subm, pkg)
+ print('aggregated %s/%s' % (subm, pkg))
aggregated.add(pkg)
if processes > 1:
check_target_prj(info)
bresults = [(subm, data['project'], data['build_results']) \
- for subm, data in info.iteritems()]
+ for subm, data in info.items()]
check_build_results(bresults)
# filter out conflicting submissions
filtered = check_binary_pkgs(obs, info, noaggregate)
bresults = [item for item in bresults if item[0] in filtered]
- info = dict(item for item in info.iteritems() if item[0] in filtered)
+ info = dict(item for item in info.items() if item[0] in filtered)
# create group project
- name, gproject = create_group_project(obs, info.keys(),
- info.itervalues().next()['meta'],
+ name, gproject = create_group_project(obs, list(info.keys()),
+ next(iter(info.values()))['meta'],
comment)
- print 'Created submit group %s\n' % name
+ print('Created submit group %s\n' % name)
aggregated = aggregate(obs, bresults, gproject, processes)
- print '\n%d submissions (%d packages) have been merged into %s' % \
- (len(info), len(aggregated), name)
+ print('\n%d submissions (%d packages) have been merged into %s' % \
+ (len(info), len(aggregated), name))
class Group(object):
"""Subcommand: Manage group submissions."""