import ast
from time import sleep
from datetime import datetime
+from datetime import timedelta
import urllib2
+import copy
from osc import conf, core
from common.buildservice import BuildService
"""Local error exception."""
pass
+def get_snapshot_sr_increment(src, target):
+    """Collect the SR (submit request) history between two snapshot refs.
+
+    src/target are refs of the form '<profile>:ref:<YYYYMMDD>.<n>'; the
+    date/version pair is parsed from the last ':'-separated component.
+    Queries the public dashboard API (PUBLIC_DASHBOARD_API_URL env var)
+    for snapshots in the window, then per-snapshot for its SRs, and
+    returns a list of SR dicts each tagged with its 'snapshot' name,
+    ordered newest snapshot first.
+
+    NOTE(review): uses module-level os/sys/requests which are not among
+    the imports visible in this hunk -- confirm they exist in the full
+    file before merging.
+    """
+    # Select matching SR info
+    # Get snapshots from dashboard db
+    if True:
+        # '<profile>:ref:<date>.<version>' -> numeric date and version.
+        n_start_date = int(src.split(':')[-1].split('.')[0])
+        n_end_date = int(target.split(':')[-1].split('.')[0])
+        start_version = int(src.split(':')[-1].split('.')[1])
+        end_version = int(target.split(':')[-1].split('.')[1])
+
+        # Widen the query window (-7 days / +2 days) so the dashboard is
+        # guaranteed to return every snapshot that can fall in the range;
+        # exact bounds are enforced per-snapshot below.
+        start_date = datetime.strptime('%d' % n_start_date, '%Y%m%d') - timedelta(days=7)
+        end_date = datetime.strptime('%d' % n_end_date, '%Y%m%d') + timedelta(days=2)
+        start_date = start_date.strftime('%Y/%m/%d')
+        end_date = end_date.strftime('%Y/%m/%d')
+
+        path = os.path.join(os.getenv('PUBLIC_DASHBOARD_API_URL'),
+                    'snapshot.code?start=%s&end=%s&profile_filter=%s'
+                    % (start_date, end_date, target.split(':ref:')[0]))
+
+        snapshot_history = []
+        # The dashboard answers one Python-literal-ish payload per line;
+        # strip escape sequences and map JSON null -> "" so that
+        # ast.literal_eval can parse it (safe, unlike eval).
+        board_data = requests.get(path).text
+        for line in board_data.replace('\\n', '').replace('\\', '').replace('null','""').split('\n'):
+            full_list = ast.literal_eval(line)
+            # Make sure ordering
+            full_list = sorted(full_list, key=lambda k:k['start_time'], reverse=False)
+            for sn in full_list:
+                sr_count = 0
+                if sn.get('project_name', None) == target.split(':ref:')[0]:# and sn.get('snapshot_url', None):
+                    this_date = int(sn.get('snapshot_name').split('_')[-1].split('.')[0])
+                    this_version = int(sn.get('snapshot_name').split('_')[-1].split('.')[1])
+                    # Skip older/newer ones
+                    # Keep only snapshots strictly after src and not past
+                    # target (inclusive of target itself).
+                    if n_start_date > this_date: continue
+                    if n_start_date == this_date and start_version >= this_version: continue
+                    if n_end_date < this_date: continue
+                    if n_end_date == this_date and end_version < this_version: continue
+
+                    # Second dashboard query: the SRs in this snapshot.
+                    path = os.path.join(os.getenv('PUBLIC_DASHBOARD_API_URL'),
+                            'snapshot.code?snapshot=%s' % (sn.get('snapshot_name')))
+                    srs_in_snapshot = requests.get(path).text
+                    for line2 in srs_in_snapshot.replace('\\n', '').replace('\\', '').replace('null','""').split('\n'):
+                        sr_list = ast.literal_eval(line2)
+                        for sr in sr_list:
+                            sr_count += 1
+                            single_item = {'snapshot': sn.get('snapshot_name')}
+                            single_item.update(sr)
+                            snapshot_history.append(single_item)
+                    print '%s %d' % (sn.get('snapshot_name'), sr_count)
+                    sys.stdout.flush()
+
+        # Ordering in DESC
+        # Newest snapshot first; per-SR 'packages'/'submitters' lists are
+        # reversed in place to match the new ordering.
+        snapshot_history.reverse()
+        for x in snapshot_history:
+            x.get('packages').reverse()
+            x.get('submitters').reverse()
+
+        # Debug dump of what will be returned.
+        for x in snapshot_history:
+            print '\n', x.get('snapshot'), x.get('sr')
+            for pkgs in x.get('packages'):
+                print ' ', pkgs.get('repo'), pkgs.get('cid')
+
+        return snapshot_history
+
class ref_create_project_obs(object):
""" create reference project to obs """
if os.getenv("REF_ACTIVE_PRJS"):
    def compare_with_manifest(self, todo_dict, manifest_packages):
+        """Return a deep copy of todo_dict whose revisions are synced to
+        the snapshot manifest, dropping entries that need no update.
+
+        todo_dict maps package name -> metadata dict with at least
+        'git', 'rev', 'content' and 'exist' keys; manifest_packages maps
+        git path -> manifest revision. The input todo_dict itself is not
+        modified (the caller diffs old vs new revisions afterwards).
+        """
        #TODO: If we have changed git path???
        #TODO: If manifest items are not proceeded???
-        for item in todo_dict:
-            if 'git' in todo_dict[item] and todo_dict[item]['git'] \
-                and todo_dict[item]['git'] in manifest_packages:
-                rev_my = todo_dict[item]['rev']
-                rev_snapshot = manifest_packages[todo_dict[item]['git']]
+        # Work on a copy so the caller can compare old vs new revisions
+        # after this returns.
+        todo_dict_merge = copy.deepcopy(todo_dict)
+        cnt = 0
+        for item in todo_dict_merge:
+            if 'git' in todo_dict_merge[item] and todo_dict_merge[item]['git'] \
+                and todo_dict_merge[item]['git'] in manifest_packages:
+                rev_my = todo_dict_merge[item]['rev']
+                rev_snapshot = manifest_packages[todo_dict_merge[item]['git']]
                if rev_my != rev_snapshot:
                    print ' >> DIFFER (%s) -> (%s) %s' % (rev_my, rev_snapshot, item)
-                    todo_dict[item]['rev'] = rev_snapshot
-                    todo_dict[item]['content'] = todo_dict[item]['content'].replace(rev_my, rev_snapshot)
-                    todo_dict[item]['exist'] = False
+                    # Point the copy at the manifest revision and clear
+                    # 'exist' so the entry survives the prune below.
+                    todo_dict_merge[item]['rev'] = rev_snapshot
+                    todo_dict_merge[item]['content'] = todo_dict_merge[item]['content'].replace(rev_my, rev_snapshot)
+                    todo_dict_merge[item]['exist'] = False
+                    cnt += 1
+        print 'Differs %d' % cnt
        # Remove packages that are already exists which do not need to update
+        # NOTE(review): deleting while iterating .items() is safe on
+        # Python 2 (.items() returns a list); would need list(...) on py3.
-        for k, v in todo_dict.items():
+        for k, v in todo_dict_merge.items():
            if 'exist' in v and v['exist'] == True:
-                del todo_dict[k]
-        return todo_dict
+                del todo_dict_merge[k]
+        return todo_dict_merge
def run_profile_update(self, build, this_project, target):
""" run update profile project """
return errpackages
+
+    def run_manual_sr_sync(self, obs_api, obs_user, obs_passwd,
+                           remote_obs_api, remote_obs_user, remote_obs_passwd,
+                           gerrit_env,
+                           current_ref, new_ref, snapshot_dir):
+        """Manually sync SRs between two ref projects.
+
+        Computes the package delta between current_ref (src) and new_ref
+        (target), matches it against the dashboard SR history from
+        get_snapshot_sr_increment(), de-duplicates repos, and triggers
+        the downstream 'SUBMIT-REQUEST-SYNC' job with the result.
+        Returns 0 on success; calls sys.exit(-1) if target is missing.
+
+        NOTE(review): gerrit_env is accepted but never used in this body
+        -- confirm whether it is needed or can be dropped by callers.
+        """
+        print '---[MANUAL SR SYNC]---'
+
+        remote_package_list = ''
+        todo_dict = {}
+
+        print '\nJOB Started at %s' % (str(datetime.now()))
+
+        #### remote buildservice ####
+        remote_build = BuildService(remote_obs_api, remote_obs_user, remote_obs_passwd)
+        #### target buildservice ####
+        build = BuildService(obs_api, obs_user, obs_passwd, \
+                             remote_obs_api, remote_obs_user, remote_obs_passwd)
+
+        src = current_ref
+        target = new_ref
+        # NOTE(review): build_src is aliased to the *target* buildservice,
+        # while step 0 lists packages via remote_build -- confirm this
+        # asymmetry is intended (caller passes identical credentials).
+        build_src = build
+        print "src = %s , target = %s" %(src, target)
+        sys.stdout.flush()
+
+        #TODO: Select matching SR info
+        snapshot_history = get_snapshot_sr_increment(src, target)
+
+        print " 0) Get package list from remote [ %s ]" % (src)
+        remote_package_list = [ p for p in remote_build.get_sourceinfo_list(src) ]
+        print '\nGet Package List from Remote Done at %s' % (str(datetime.now()))
+        if 'patchinfo' in remote_package_list:
+            remote_package_list.remove('patchinfo')
+            print 'Please check patchinfo'
+        sys.stdout.flush()
+
+        print "\n 0-1) copy package list (%d):\n" %(len(remote_package_list))
+        if not build.exists(target):
+            print "\n 0-2) Sorry, target %s does not exist" % (target)
+            # NOTE(review): sys.exit(-1) inside a method kills the whole
+            # process; a raised exception would be friendlier to callers.
+            sys.exit(-1)
+        todo_dict = self.list_packages_from_remote(build_src, build, src, target)
+
+        print '\nListing from Remote Done at %s' % (str(datetime.now()))
+        print 'todo_dict(%d):' % (len(todo_dict))
+
+        if True:
+            package_list = [ x for x in todo_dict ]
+            print "\n********"
+            #print " 1) package list of target project \n %s" %(package_list)
+
+            #FIXME:
+            profile = {'project': target.split(':ref:')[0],
+                       'baseproject': None,
+                       'snapshot_url': os.path.dirname(os.path.join(os.getenv("URL_PUBLIC_REPO_BASE"))),
+                       'snapshot_username': os.getenv("REF_SNAPSHOT_USERNAME",''),
+                       'snapshot_password': os.getenv("REF_SNAPSHOT_PASSWORD",'')}
+
+            manifest_packages = self.get_manifest_filelists_snapshot(profile, \
+                                os.path.join(os.getenv("URL_PUBLIC_REPO_BASE"), \
+                                snapshot_dir, \
+                                "builddata/manifest"))
+
+            print '2-0) Manifest packages %d' % len(manifest_packages)
+            print profile
+            print os.path.join(os.getenv("URL_PUBLIC_REPO_BASE"), \
+                               snapshot_dir, \
+                               "builddata/manifest")
+            for x in manifest_packages:
+                print "%s > %s" % (x, manifest_packages[x])
+            sys.stdout.flush()
+
+            # Changed git repo check
+            # A todo entry whose git path is absent from the manifest is
+            # treated as a moved repository: it is removed from both lists
+            # and re-added through the full remote listing below.
+            print '\n\n'
+            git_changed_packages = []
+            for x in todo_dict:
+                if todo_dict[x].get('git', None) and todo_dict[x].get('git') not in manifest_packages:
+                    print 'Git repository change detected! %s' % todo_dict[x]
+                    sys.stdout.flush()
+                    git_changed_packages.append(x)
+                    package_list.remove(x)
+                    if x in remote_package_list:
+                        remote_package_list.remove(x)
+            print '\n\n'
+
+            # Packages present locally but not remotely -> delete.
+            packages = self.remove_duplicates(package_list, remote_package_list)
+            packages.extend(git_changed_packages)
+            print "\n********"
+            print " 2) remove package %s" %(packages)
+            for pkgname in packages:
+                del todo_dict[pkgname]
+            sys.stdout.flush()
+
+            # Packages present remotely but not locally -> add.
+            packages = self.remove_duplicates(remote_package_list, package_list)
+            print "\n********"
+            print " 3) add packages %s" %(packages)
+            if packages:
+                ret_dict_add = self.list_packages_from_remote(\
+                               remote_build, build, src, target, packages=packages)
+                if ret_dict_add:
+                    todo_dict.update(ret_dict_add)
+            print '\nAdd Remove Done at %s' % (str(datetime.now()))
+            sys.stdout.flush()
+
+            print "\n********"
+            print " 4) compare package project "
+            todo_dict_latest = self.compare_with_manifest(todo_dict, manifest_packages)
+            print '\nCompare With Manifest Done at %s' % (str(datetime.now()))
+            sys.stdout.flush()
+            # Drop entries whose revision did not change.
+            # NOTE(review): compare_with_manifest may already have removed
+            # keys (exist==True), so todo_dict_latest[td] can raise
+            # KeyError here -- confirm against real data.
+            for td in todo_dict:
+                if todo_dict_latest[td].get('rev') == todo_dict[td].get('rev'):
+                    del todo_dict_latest[td]
+
+            print '\n 4-1) Final packages to be updated %d' % len(todo_dict_latest)
+            for td in todo_dict_latest:
+                sys.stdout.flush()
+                print "%s > %s" % (todo_dict_latest[td].get('git'), todo_dict_latest[td].get('rev'))
+
+            # Select matching SR info
+            # Keep only the newest SR per changed git repo: snapshot_history
+            # is newest-first, and a repo is removed from todo_dict_repos
+            # the first time it is seen.
+            todo_dict_repos = [ todo_dict_latest[x].get('git') for x in todo_dict_latest ]
+            tgt = src.split(':%s:' % target.split(':ref:')[0])[0]
+            request_info = {'source': target.split(':ref:')[0],
+                            'target': tgt,
+                            'project_to_create_timestamp': datetime.utcnow().strftime("%Y%m%d.%H%M%S"),
+                            'sr': []}
+            request_sr_list = []
+            print '\n\nChecking snapshot history...'
+            snapshot_history_count = 0
+            for sh in snapshot_history:
+                b_added = False
+                for sh_packages in sh.get('packages'):
+                    if sh_packages.get('repo') in todo_dict_repos:
+                        todo_dict_repos.remove(sh_packages.get('repo'))
+                        print 'Adding %s' % sh_packages.get('repo')
+                        b_added = True
+                        sys.stdout.flush()
+                if len(sh.get('packages')) > 0 and sh not in request_info.get('sr'):
+                    snapshot_history_count += 1
+                else:
+                    print 'Empty packages %s' % sh
+                if b_added == True:
+                    request_sr_list.append(sh)
+            print 'snapshot_history_count = %d' % snapshot_history_count
+
+            # Remove duplicated repo
+            # In-place dedup: the first (newest) occurrence of a repo wins;
+            # later occurrences are deleted together with their submitter.
+            print '\n\nChecking duplicated entries...'
+            reqinfo_count = 0
+            todo_dict_repos = []
+            for reqinfo in request_sr_list:
+                print reqinfo.get('sr'), "\n======="
+                start_index = 0
+                debug_cnt = 0
+                while True:
+                    debug_cnt += 1
+                    # NOTE(review): runaway-loop guard; assert is stripped
+                    # under 'python -O' -- consider raising instead.
+                    if debug_cnt >= 1000: assert False
+                    prev_start_index = start_index
+                    for index in range(start_index, len(reqinfo.get('packages'))):
+                        repo = reqinfo.get('packages')[index].get('repo')
+                        cid = reqinfo.get('packages')[index].get('cid')
+                        if repo in todo_dict_repos:
+                            print "-", repo, cid
+                            del reqinfo.get('packages')[index]
+                            del reqinfo.get('submitters')[index]
+                            # index+1 here, -1 after the loop: net effect is
+                            # to resume at 'index', which now holds the
+                            # element shifted in by the deletion.
+                            start_index = index + 1
+                            break
+                        else:
+                            todo_dict_repos.append(repo)
+                            print "+", repo, cid
+                            reqinfo_count += 1
+                            sys.stdout.flush()
+                    if start_index == prev_start_index: break
+                    if start_index > len(reqinfo.get('packages')): break
+                    start_index -= 1
+            print 'reqinfo_count = %d' % reqinfo_count
+
+            # Ordering by ASC
+            # Oldest first for the downstream job; drop emptied SRs.
+            request_sr_list.reverse()
+            print 'Final empty list...'
+            print [ x for x in request_sr_list if len(x.get('packages')) <= 0 ]
+            request_sr_list = [ x for x in request_sr_list if len(x.get('packages')) > 0 ]
+            print 'Final count : %d' % sum([ len(x.get('packages')) for x in request_sr_list ])
+
+            request_info['sr'] = request_sr_list
+
+            # Hand the assembled request off to the next pipeline stage.
+            trigger_next('SUBMIT-REQUEST-SYNC', request_info, show=False)
+
+            print '\n\nFinal data to be sync...'
+            rqinfo_count = 0
+            for rqinfo in request_info.get('sr'):
+                rqinfo_count += len(rqinfo.get('packages'))
+            print 'rqinfo_count = %d' % rqinfo_count
+
+            # Check final and reqinfo
+            # Sanity check: report SR packages not present in the final
+            # todo delta (should normally print nothing).
+            print '\n\nChecking increment diff...'
+            todo_dict_repos = [ todo_dict_latest[x].get('git') for x in todo_dict_latest ]
+            for rqinfo in request_info.get('sr'):
+                for q in rqinfo.get('packages'):
+                    if q.get('repo') not in todo_dict_repos:
+                        print 'EXTRA %s' % q
+
+        print "\n********"
+        print " 7) Sync Done..."
+
+        return 0
+
def run_ref_create_project_obs(self, obs_api, obs_user, obs_passwd,
remote_obs_api, remote_obs_user, remote_obs_passwd,
gerrit_env, fields=None, copy_person=False):
def run_copy_project_obs(self, obs_api, obs_user, obs_passwd,
remote_obs_api, remote_obs_user, remote_obs_passwd,
- gerrit_env, fields=None, copy_person=False):
+ gerrit_env, fields=None, copy_person=copy_person):
""" copy project """
print '---[JOB STARTED]-------------------------'
print('-----[JOB STARTED: ref_create_project_obs ]-----')
for loop in range(1):
- try:
+ #try:
if action == 'create' or action is None:
remote_obs_api = obs_api = os.getenv("REF_TARGET_OBS_API_URL")
remote_obs_api, remote_obs_user, remote_obs_passwd,
gerrit_env, fields, copy_person=False)
+ elif action == 'trigger_manual_sr_sync':
+
+ obs_api = os.getenv("REF_TARGET_OBS_API_URL")
+ obs_user = os.getenv("REF_TARGET_OBS_API_USERNAME")
+ obs_passwd = os.getenv("REF_TARGET_OBS_API_PASSWD")
+
+ # default value is null
+ if os.getenv("REF_GERRIT_NAME"):
+ gerrit_env = GerritEnv(os.getenv("REF_GERRIT_NAME"))
+ else:
+ gerrit_env = GerritEnv("")
+
+ current_ref = os.getenv("CURRENT_REF")
+ new_ref = os.getenv("NEW_REF")
+ snapshot_dir = os.getenv("SNAPSHOT_DIR")
+
+ return self.run_manual_sr_sync(obs_api, obs_user, obs_passwd,
+ obs_api, obs_user, obs_passwd,
+ gerrit_env,
+ current_ref, new_ref, snapshot_dir
+ )
+
else:
print 'not enable action = %s' %(action)
return -1
- except Exception, err:
- print 'run_ef_create_project_obs operation failed, retrying...'
- print err
- raise LocalError("FAIL %s" % (err))
- sleep(5)
+
+ #except Exception, err:
+ # print 'run_ef_create_project_obs operation failed, retrying...'
+ # print err
+ # raise LocalError("FAIL %s" % (err))
+ # sleep(5)
return True
if __name__ == '__main__':
-    try:
+# try:
+# NOTE(review): commenting out the entry-point try/except means any
+# uncaught exception now exits with the interpreter's traceback instead
+# of the printed message + sys.exit(1). The indented lines below remain
+# at their old depth, which Python still accepts under the bare 'if'.
    trigger = ref_create_project_obs()
    sys.exit(trigger.main(sys.argv[1]))
-    except Exception as e:
-        print(e)
-        sys.exit(1)
+# except Exception as e:
+#     print(e)
+#     sys.exit(1)