From 5c6a02d0345fa0a0895e32201e62538968d992bf Mon Sep 17 00:00:00 2001
From: SoonKyu Park <sk7.park@samsung.com>
Date: Wed, 21 Mar 2018 11:42:58 +0900
Subject: [PATCH] Fix errors and add new features to gbs dashboard build

1. Change the method used to match a buildlog file to its spec file name
2. Add summary.log after each one-repo-arch gbs build so that snapshot
   creation can use the gbs build results
   : the gbsfullbuild-image-creation job will not be triggered if the gbs build failed
3. Fix error in finding the build error reason
4. update-gbs-meta/gbs-ref-fullbuild/gbsfullbuild-create-snapshot
   functionality is now handled by gbs dashboard build instead
5. Fix job_buildmonitor.py to handle gbs dashboard builds that share the same tag name
6. Add current time to 'gbs release build' tag name
7. Handle 'repo init/sync' failure case
   : as-is : job_gbsdbbuild_one_repoarch_build returns fail
   : to-be : job_gbsdbbuild_one_repoarch_build returns success but marks
     the repo init/sync failure reason

Change-Id: Idd055cd94e194e7d2bad342b0254fa480eaab7ef
---
 common/gbsutils.py                   |  61 +++-
 debian/jenkins-scripts.install       |   1 +
 job_buildmonitor.py                  |  12 +-
 job_gbs_build_dispatcher.py          |  45 ++-
 job_gbsdbbuild_create_snapshot.py    |  78 +++--
 job_gbsdbbuild_one_repoarch_build.py |  14 +-
 job_gbsdbbuild_update_meta.py        | 599 +++++++++++++++++++++++++++++++++++
 packaging/jenkins-scripts.spec       |   1 +
 8 files changed, 769 insertions(+), 42 deletions(-)
 create mode 100755 job_gbsdbbuild_update_meta.py

diff --git a/common/gbsutils.py b/common/gbsutils.py
index 84fdd78..5f6f201 100644
--- a/common/gbsutils.py
+++ b/common/gbsutils.py
@@ -33,6 +33,8 @@ from common.buildtrigger import remote_jenkins_build_job, trigger_next
 from urllib import quote_plus
 from datetime import datetime
 
+SUMMARY_LOG='summary.log'
+
 class RuntimeException(Exception):
     """Local error handler"""
     pass
@@ -80,6 +82,16 @@ def find_info_from_one_buildlog(buildlog_file,spec_file_name):
     print "spec_file_name: %s buildtime: %s built_rpm_files: %s" %(spec_file_name,buildtime,built_rpm_files)
     return (buildtime,built_rpm_files)
 
+def find_spec_name_from_build_log(buildlog_file):
+    cmd = 'tail -100 '+buildlog_file
+    buildlog_tail=subprocess.check_output(cmd,shell=True)
+
+    spec_name = ''
+    for each_line in buildlog_tail.split('\n'):
+        if each_line.find('finished "build ') != -1 or each_line.find('failed "build ') != -1:
+            spec_name = each_line.split('"build ')[1].split('.spec')[0]
+    return spec_name
+
 class GBSBuild:
     """A class which supports with statement"""
 
@@ -311,7 +323,8 @@ class GBSBuild:
         replaced_manifest_content = ''
         for each_line in manifest_content.split('\n'):
             for git_name in build_pkg_list_replace.keys():
-                if each_line.find(git_name) != -1:
+#                if each_line.find(git_name) != -1:
+                if each_line.find('<project name="'+git_name+'"') != -1:
@@ -512,7 +525,7 @@ class GBSBuild:
-        total_pkg_num=summary.split('<td>')[1]
-        succeeded_pkg_num=summary.split('<td>')[2]
-        export_err_pkg_num=summary.split('<td>')[3]
-        expansion_err_pkg_num=summary.split('<td>')[4]
-        build_err_pkg_num=summary.split('<td>')[5]
+        total_pkg_num=summary.split('<td>')[1].split('</td>')[0]
+        succeeded_pkg_num=summary.split('<td>')[2].split('</td>')[0]
+        export_err_pkg_num=summary.split('<td>')[3].split('</td>')[0]
+        expansion_err_pkg_num=summary.split('<td>')[4].split('</td>')[0]
+        build_err_pkg_num=summary.split('<td>')[5].split('</td>')[0]
         if total_pkg_num == succeeded_pkg_num:
             status = 'S'
@@ -519,6 +532,12 @@ class GBSBuild:
 
         print 'GBS Build status: %s, status_reason: %s' %(status,status_reason)
 
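+        #record the overall per-repo/arch gbs build result in summary.log;
+        #gbsdbbuild_create_snapshot reads it to decide whether image creation is triggered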
+        summary_log_file=self.build_root+'/local/repos/'+profile_path+'/'+arch+'/'+SUMMARY_LOG
+        summary_log_content='GBS Build status: "%s", status_reason: "%s"' %(status,status_reason)
+        f = open(summary_log_file,'wb')
+        f.write(summary_log_content)
+        f.close()
+
         return (status,status_reason)
 
@@ -543,7 +562,10 @@ class GBSBuild:
 #        repo_init_arg = ' -u ssh://%s:%s/scm/manifest' %(os.getenv('PUBLIC_GERRIT_HOSTNAME'), os.getenv('GERRIT_SSHPORT'))
 
         #add '-b' option
-        repo_init_arg += ' -b '+self.t_branch
+        if self.gbsbuild_tag.startswith('REF'):
+            repo_init_arg += ' -b '+self.t_branch+'_staging'
+        else:
+            repo_init_arg += ' -b '+self.t_branch
 
         #add '-m' option
         repo_init_arg += ' -m '+self.profile+'_'+repo+'.xml'
@@ -720,6 +742,13 @@ class GBSBuild:
             os.mkdir(buildlogs_repository)
             for gbs_build_arch in os.listdir(build_result_repo_path):
                 os.mkdir(os.path.join(buildlogs_repository,gbs_build_arch))
+                #summary.log : overall gbs build status
+                gbs_build_summary_src = os.path.join(build_result_repo_path,gbs_build_arch,SUMMARY_LOG)
+                gbs_build_summary_dest = os.path.join(buildlogs_repository,gbs_build_arch,SUMMARY_LOG)
+                print 'copy summary.log\nsrc file: %s, dest file: %s'\
+                      %(gbs_build_summary_src,gbs_build_summary_dest)
+                shutil.copy(gbs_build_summary_src,gbs_build_summary_dest)
+
                 success_log_dest=os.path.join(buildlogs_repository,gbs_build_arch,'succeeded')
                 fail_log_dest=os.path.join(buildlogs_repository,gbs_build_arch,'failed')
                 os.mkdir(success_log_dest)
@@ -729,13 +758,19 @@ class GBSBuild:
                 fail_log_root=os.path.join(build_result_repo_path,gbs_build_arch,'logs/fail')
                 for success_pkg in os.listdir(success_log_root):
                     src_file=success_log_root+'/'+success_pkg+'/log.txt'
-                    print 'success_log_dest: %s ,success_pkg: %s' %(success_log_dest,success_pkg)
-                    dest_file=success_log_dest+'/'+re.sub('-\d+|.\d+','',success_pkg)+'.buildlog.txt'
+                    spec_name=find_spec_name_from_build_log(src_file)
+#                    print 'success_log_dest: %s ,success_pkg: %s' %(success_log_dest,success_pkg)
+                    dest_file=success_log_dest+'/'+spec_name+'.buildlog.txt'
+#                    dest_file=success_log_dest+'/'+re.sub('-\d+|.\d+','',success_pkg)+'.buildlog.txt'
+                    print 'success_pkg: %s, spec_name: %s, dest_file: %s' %(success_pkg,spec_name,dest_file)
                     shutil.copy(src_file,dest_file)
                 #failed packages
                 for fail_pkg in os.listdir(fail_log_root):
                     src_file=fail_log_root+'/'+fail_pkg+'/log.txt'
-                    dest_file=fail_log_dest+'/'+re.sub('-\d+|.\d+','',fail_pkg)+'.buildlog.txt'
+                    spec_name=find_spec_name_from_build_log(src_file)
+#                    dest_file=fail_log_dest+'/'+re.sub('-\d+|.\d+','',fail_pkg)+'.buildlog.txt'
+                    dest_file=fail_log_dest+'/'+spec_name+'.buildlog.txt'
+                    print 'fail_pkg: %s, spec_name: %s, dest_file: %s' %(fail_pkg,spec_name,dest_file)
                     shutil.copy(src_file,dest_file)
                 #Remove folder if there is no file in arch directory
                 if len(os.walk(success_log_dest).next()[2]) == 0:
@@ -780,7 +815,8 @@ class GBSBuild:
 #                if buildlog_file in succeeded_build_log_list:
                 if os.path.exists(os.path.join(build_log_dir,'succeeded')):
                     for buildlog_file in os.listdir(os.path.join(build_log_dir,'succeeded')):
-                        if spec_buildlog_name == re.sub('-\d+|.\d+','',buildlog_file):
+#                        if spec_buildlog_name == re.sub('-\d+|.\d+','',buildlog_file):
+                        if spec_buildlog_name == buildlog_file:
                             build_status = 'S'
                             (build_time, built_rpm_files) = find_info_from_one_buildlog(os.path.join(build_log_dir,'succeeded',buildlog_file),spec_file)
                             for rpm_file_name in built_rpm_files:
@@ -793,7 +829,8 @@ class GBSBuild:
                             rpm_file_info[-1]['status_reason']=''
                 if os.path.exists(os.path.join(build_log_dir,'failed')):
                     for buildlog_file in os.listdir(os.path.join(build_log_dir,'failed')):
-                        if spec_buildlog_name == re.sub('-\d+|.\d+','',buildlog_file):
+#                        if spec_buildlog_name == re.sub('-\d+|.\d+','',buildlog_file):
+                        if spec_buildlog_name == buildlog_file:
                             build_status = 'F'
                             status_reason = 'Build Failed'
                             rpm_file_info.append({'git_repository':git_repository})
diff --git a/debian/jenkins-scripts.install b/debian/jenkins-scripts.install
index 3c2ab5b..38abb05 100644
--- a/debian/jenkins-scripts.install
+++ b/debian/jenkins-scripts.install
@@ -51,3 +51,4 @@ debian/tmp/job_obs_project_manager.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_gbs_dashboard_build.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_gbsdbbuild_create_snapshot.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_gbs_build_dispatcher.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_gbsdbbuild_update_meta.py /var/lib/jenkins/jenkins-scripts/
diff --git a/job_buildmonitor.py b/job_buildmonitor.py
index 17b38e0..78dd696 100644
--- a/job_buildmonitor.py
+++ b/job_buildmonitor.py
@@ -1558,7 +1558,8 @@ def update_gbsdbbuild_target(content):
     #and arch_id from build_arch_name
     query = "SELECT gbp.id, brn.id, ban.id "\
             "FROM gbs_build_project gbp, build_repository_name brn, build_arch_name ban "\
-            "WHERE gbp.gbsbuild_tag= %s and brn.repository= %s and ban.arch = %s"
+            "WHERE gbp.gbsbuild_tag= %s and brn.repository= %s and ban.arch = %s "\
+            "ORDER BY gbp.id DESC LIMIT 1"
     query_data = (gbsbuild_tag,repository,architecture)
     query_result = buildmonitor_db.get_multi_values_from_query_data(query, query_data)
     (gbs_build_project_id,repository_id,arch_id) = query_result[0]
@@ -1589,7 +1590,8 @@ def update_gbs_build_package(content):
     query = "SELECT gbt.id FROM gbs_build_target gbt, "\
             "gbs_build_project gbp, build_repository_name brn, build_arch_name ban "\
             "WHERE gbp.gbsbuild_tag = %s and brn.repository = %s and ban.arch = %s " \
-            "and gbt.gbs_build_project_id = gbp.id and gbt.repository = brn.id and gbt.arch = ban.id"
+            "and gbt.gbs_build_project_id = gbp.id and gbt.repository = brn.id and gbt.arch = ban.id "\
+            "ORDER BY gbp.id DESC LIMIT 1"
     query_data = (gbsbuild_tag,bm_repo,bm_arch)
     gbs_build_target_id = buildmonitor_db.get_value_from_query_data(query, query_data)
     print "gbs_build_target_id: %s" %gbs_build_target_id
@@ -1632,7 +1634,8 @@ def update_gbsdbbuild_snapshot(content):
     gbsbuild_tag = content.get('gbsbuild_tag')
 
     #get gbs_build_project_id
-    query = "SELECT id FROM gbs_build_project WHERE gbsbuild_tag = %s"
+    query = "SELECT id FROM gbs_build_project WHERE gbsbuild_tag = %s "\
+            "ORDER BY id DESC LIMIT 1"
     query_data = (gbsbuild_tag,)
     gbs_build_project_id = buildmonitor_db.get_value_from_query_data(query, query_data)
@@ -1669,7 +1672,8 @@ def update_gbsdbbuild_image(content):
 
     #get gbs_build_snapshot_id and repository_id
     query = "SELECT gbs.id, brn.id FROM gbs_build_snapshot gbs, build_repository_name brn "\
-            "WHERE gbs.snapshot_name = %s and brn.repository = %s"
+            "WHERE gbs.snapshot_name = %s and brn.repository = %s "\
+            "ORDER BY gbs.id DESC LIMIT 1"
     query_data = (build_id,repository)
     query_result = buildmonitor_db.get_multi_values_from_query_data(query, query_data)
     (gbs_build_snapshot_id,repository_id) = query_result[0]
diff --git a/job_gbs_build_dispatcher.py b/job_gbs_build_dispatcher.py
index 363c452..c24bbfc 100755
--- a/job_gbs_build_dispatcher.py
+++ b/job_gbs_build_dispatcher.py
@@ -87,8 +87,8 @@ def release_snapshot(contents):
 
 def get_snapshot_id(obs_prj):
     #psk temporary for building test
-    #curr_date=datetime.today().strftime("%Y%m%d")
-    curr_date='20180305'
+    curr_date=datetime.today().strftime("%Y%m%d")
+    #curr_date='20180305'
     basic_url= os.getenv('URL_PUBLIC_REPO_BASE') + '/snapshots/tizen/'
     profile = obs_prj.replace("Tizen:","").replace(":","-").lower()
     profile_url = basic_url+profile+'/'
@@ -110,8 +110,9 @@ def get_snapshot_id(obs_prj):
                 last_snapshot_num = snapshot_num
     snapshot_num_used = curr_date+"."+last_snapshot_num
 
+    curr_time = datetime.now().strftime('%Y%m%d.%H%M%S')
     snapshot_id=snapshot_id_prefix+"_"+snapshot_num_used
-    tag_name="RELEASE/"+snapshot_id_prefix.upper()+"/"+snapshot_num_used
+    tag_name="RELEASE/"+snapshot_id_prefix.upper()+"/"+snapshot_num_used+'/'+curr_time
 
     return (snapshot_id,tag_name)
 
@@ -135,6 +136,40 @@ def release_build():
         gbs_update_dashboard(bypass_data, via='direct_call')
         trigger_next("gbs-dashboard-build_%s" % tag.replace('/', '_'), bypass_data)
 
+def ref_fullbuild(contents):
+    print "Start reference snapshot fullbuild"
+
+    build_profile_snapshot_id = contents.get("build_profile_snapshot_id")
+    snapshot = build_profile_snapshot_id['snapshot_id']
+    obs_prj = build_profile_snapshot_id['obs_prj']
+    snapshot_num = snapshot.split('_')[-1]
+    curr_time = datetime.now().strftime('%Y%m%d.%H%M%S')
+    tag = 'REF/'+obs_prj.replace(':','-').upper()+'/'+snapshot_num+'/'+curr_time
+    print "snapshot:%s tag:%s" %(snapshot,tag)
+
+    bypass_data = {
+        "gbs_type"       : "Release Build",
+        "tag"            : tag,
+        "source_snapshot": snapshot,
+        "main_project"   : obs_prj,
+        "mode"           : "queued",
+    }
+#    submitter='sk7.park@samsung.com'
+#    transform_packages=[{'platform/core/dotnet/build-tools': {'commit': '5bc00509597ae289c30bf4498bbb96e51dfe13b5', 'image': 'include', 'build': 'include'}}]
+#    bypass_data = {
+#        "gbs_type"       : "Dependency Build",
+#        "tag"            : tag,
+#        "source_snapshot": snapshot,
+#        "main_project"   : obs_prj,
+#        "mode"           : "queued",
+#        "submitter"      : contents.get("user_email"),
+#        "packages"       : transform_packages
+#    }
+
+    gbs_update_dashboard(bypass_data, via='direct_call')
+    trigger_next("gbs-dashboard-build_%s" % tag.replace('/', '_'), bypass_data)
+
+
 def main():
     """The main body"""
@@ -153,6 +188,10 @@ def main():
         release_snapshot(contents)
         return
 
+    if contents.get("menu", None) == "Ref Fullbuild":
+        ref_fullbuild(contents)
+        return
+
     transform_packages = []
     for x in contents.get("repo_commit"):
         transform_packages.append(
diff --git a/job_gbsdbbuild_create_snapshot.py b/job_gbsdbbuild_create_snapshot.py
index d177f7d..e538e38 100755
--- a/job_gbsdbbuild_create_snapshot.py
+++ b/job_gbsdbbuild_create_snapshot.py
@@ -67,7 +67,7 @@ def replace_ks_file_for_gbsdbbuild(kickstart,build_id,gbsbuild_trigger_category)
             repo_name=line.split('--name=')[1].split(' ')[0]
             orig_line=line.replace('/@BUILD_ID@/repos/','/'+snapshot_build_id+'/repos/')+' --priority=99\n'
             if gbsbuild_trigger_category.lower() == 'release build':
-                add_line=line.replace(repo_name,'gbs_repo').replace('/snapshots/tizen/','/'+gbsdbbuild_dl_postfix+'/releases/tizen/').replace('/@BUILD_ID@/repos/','/'+build_id+'/repos/')+' --priority=1'
+                add_line=line.replace(repo_name,'gbs_repo').replace('/snapshots/tizen/','/'+gbsdbbuild_dl_postfix+'/releases/tizen/').replace('/@BUILD_ID@/repos/','/'+snapshot_build_id+'/'+build_id+'/repos/')+' --priority=1'
             else:
                 add_line=line.replace(repo_name,'gbs_repo').replace('/snapshots/tizen/','/'+gbsdbbuild_dl_postfix+'/tizen/').replace('/@BUILD_ID@/repos/','/'+snapshot_build_id+'/'+build_id+'/repos/')+' --priority=1'
             if gbsbuild_trigger_category.lower() == 'dependency build':
@@ -78,6 +78,22 @@ def replace_ks_file_for_gbsdbbuild(kickstart,build_id,gbsbuild_trigger_category)
 
     return replaced_ks
 
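+#check summary.log of every repository/arch combination produced by the
+#one-repoarch gbs builds; any single failure marks the overall status 'F'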
+def get_overall_build_status(live_buildlog_path,gbsbuild_dic):
+    overall_build_status = 'S'
+    for repo in gbsbuild_dic['repository']:
+        for arch in gbsbuild_dic['arch_list'][repo]:
+            summary_log_file=os.path.join(live_buildlog_path,repo,arch,'summary.log')
+            f = open(summary_log_file,'rb')
+            summary_log_contents = f.read()
+            f.close()
+            print 'summary_log_file: %s, summary_log_contents: %s'\
+                  %(summary_log_file,summary_log_contents)
+            #if one of the repo-arch builds fails, overall build status will be 'F'
+            if summary_log_contents.split('GBS Build status: "')[1].split('"')[0] == 'F':
+                overall_build_status = 'F'
+                return overall_build_status
+
+    return overall_build_status
 
 def prepare_trigger_data(images, build_id, path_repo, project, url_pub_base,gbsbuild_tag,gbsbuild_trigger_category):
 
@@ -195,16 +211,13 @@ def make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic):
 #    local_targets = snapshot.targets
 #    local_path = snapshot.path
 #    local_dir = snapshot.dir
-    if gbsbuild_dic['gbsbuild_tag'].startswith("RELEASE"):
-        local_build_id = gbsbuild_dic['snapshot_build_id']
-        local_targets = create_local_targets(gbsbuild_dic)
+    if gbsbuild_dic['trigger_category'].lower() == 'release build':
         local_dir = os.path.join(os.getenv('GBSDBBUILD_DL_POSTFIX'),'releases/tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'])
-        local_path = os.path.join(base_path,local_dir,local_build_id)
     else:
-        local_build_id = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['gbsbuild_tag'].split('/')[1]
-        local_targets = create_local_targets(gbsbuild_dic)
         local_dir = os.path.join(os.getenv('GBSDBBUILD_DL_POSTFIX'),'tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'])
-        local_path = os.path.join(base_path,local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id)
+    local_build_id = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['gbsbuild_tag'].split('/')[-1]
+    local_targets = create_local_targets(gbsbuild_dic)
+    local_path = os.path.join(base_path,local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id)
 
     # Delete Snapshot() instance.
     # del snapshot
 
@@ -289,10 +302,11 @@ def make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic):
                            os.getenv('GERRIT_REVIEW_URL'),
                            live_repo_path,
                            repo['Architectures'])
-    if gbsbuild_dic['trigger_category'].lower() == 'release build':
-        repo_path = os.path.join(local_dir,local_build_id)
-    else:
-        repo_path = os.path.join(local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id)
+#    if gbsbuild_dic['trigger_category'].lower() == 'release build':
+#        repo_path = os.path.join(local_dir,local_build_id)
+#    else:
+#        repo_path = os.path.join(local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id)
+    repo_path = os.path.join(local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id)
 
     return {'project': project,
             'repo': repos,
             'repo_path': repo_path,
@@ -317,16 +331,15 @@ def main():
     fields = trigger_info(os.getenv("TRIGGER_INFO"))
     gbsbuild_dic = fields['gbsbuild_dic']
 
-    snapshot_name = gbsbuild_dic['snapshot_build_id']
+    snapshot_name = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['gbsbuild_tag'].split('/')[-1]
     project = gbsbuild_dic['obs_prj']
-    if gbsbuild_dic['gbsbuild_tag'].startswith("RELEASE"):
-        snapshot_name = gbsbuild_dic['snapshot_build_id']
+#    if gbsbuild_dic['gbsbuild_tag'].startswith("RELEASE"):
+    if gbsbuild_dic['trigger_category'].lower() == 'release build':
         snapshot_url = os.path.join(os.getenv('URL_PUBLIC_REPO_BASE'),os.getenv('GBSDBBUILD_DL_POSTFIX'),\
                        'releases/tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'],\
-                       snapshot_name)
+                       gbsbuild_dic['snapshot_build_id'],snapshot_name)
     else:
-        snapshot_name = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['gbsbuild_tag'].split('/')[1]
         snapshot_url = os.path.join(os.getenv('URL_PUBLIC_REPO_BASE'),os.getenv('GBSDBBUILD_DL_POSTFIX'),\
                        'tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'],\
                        gbsbuild_dic['snapshot_build_id'],snapshot_name)
@@ -350,7 +363,7 @@ def main():
         else:
             build_id_liverepo=os.path.join(build_id_liverepo,subdir)
     print "build_id_liverepo = %s" %build_id_liverepo
-    cmd = 'rm -rf `find '+build_id_liverepo+\
+    cmd = 'sudo rm -rf `find '+build_id_liverepo+\
          ' ! -name '+os.path.basename(build_id_liverepo)+\
          ' | grep -v '+gbsbuild_dic['gbsbuild_tag'].split('/')[-1]+'`'
     print "clear live root command: %s" %cmd
@@ -380,8 +393,10 @@ def main():
                                         repo_data['repo_path'],
                                         project, base_url, gbsbuild_dic['gbsbuild_tag'],
                                         gbsbuild_dic['trigger_category'])
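+    # decide from the summary.log files whether the whole gbs build succeeded
+    # before creating images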
+    overall_gbsbuild_status = get_overall_build_status(os.path.join(build_id_liverepo,gbsbuild_dic['gbsbuild_tag'].split('/')[-1],'buildlogs'),gbsbuild_dic)
     # trigger image creation jobs
-    trigger_image_creation(trigger_data)
+    if overall_gbsbuild_status == 'S':
+        trigger_image_creation(trigger_data)
 
     # trigger post snapshot creation job with repo data
     data = repo_data.copy()
@@ -413,6 +428,31 @@ def main():
              }
         trigger_next("BUILD-MONITOR_%s" %bm_stage, bm_data)
 
+    # TRIGGER NEXT BUILD-MONITOR-Success (Success or Build Failed)
+    # In case of build failure, create-snapshot status will be marked as success
+    # In case of image creation failure, create-snapshot status will be marked as failed
+    if overall_gbsbuild_status == 'S':
+        update_message="Succeeded"
+    else:
+        update_message="Build Failed"
+    if len(update_message) < 119:
+        trigger_next("BUILD-MONITOR-Success", \
+                    {'bm_stage':'GBSFULLBUILD_SNAPSHOT',
+                     'snapshot_name':gbsbuild_dic['snapshot_build_id'],
+                     'gbsfullbuild_string': update_message})
+
+    # TRIGGER NEXT BUILD-MONITOR-Fail
+    update_message="Image Creation Failed"
+    if len(update_message) < 119:
+        trigger_next("BUILD-MONITOR-Failed", \
+                    {'bm_stage':'GBSFULLBUILD_SNAPSHOT',
+                     'snapshot_name':gbsbuild_dic['snapshot_build_id'],
+                     'gbsfullbuild_string': update_message})
+
+    if gbsbuild_dic["gbsbuild_tag"].startswith('REF') and overall_gbsbuild_status == 'S':
+        gbsmeta_data = {"triggered_from": 'gbsdbbuild_create_snapshot'
+                       }
+        trigger_next("gbsdbbuild-update-meta",gbsmeta_data)
 
 if __name__ == '__main__':
     try:
diff --git a/job_gbsdbbuild_one_repoarch_build.py b/job_gbsdbbuild_one_repoarch_build.py
index 98a571d..8831c43 100644
--- a/job_gbsdbbuild_one_repoarch_build.py
+++ b/job_gbsdbbuild_one_repoarch_build.py
@@ -110,10 +110,10 @@ def trigger_bm_for_gbs_build_package(gbsbuild_tag,bm_pkg_info,bm_repo,bm_arch):
 
 def update_db_for_gbsbuild_target(repository,architecture,bm_start_datetime,\
-                                  gbs_build_status,gbsbuild_tag):
+                                  gbs_build_status,gbsbuild_tag, status_reason=None):
     gbs_update_data = {"tag" : gbsbuild_tag,
                        "mode" : "build_finished",
-                       "reason" : {"repo":repository,"arch":architecture,"status":gbs_build_status}}
+                       "reason" : {"repo":repository,"arch":architecture,"status":gbs_build_status, "comment":status_reason}}
     gbs_update_dashboard(gbs_update_data,via='direct_call')
 
     bm_end_datetime = datetime.datetime.now()
@@ -154,6 +154,10 @@ def main():
 
     original_dir=os.getcwd()
 
+    # clear /var/tmp/*, where redundant files may be left behind if a gbs build aborts abnormally
+    cmd = 'sudo rm -rf /var/tmp/*'
+    subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+
     _check_mount_existance(gbsbuild_dic['build_root'])
 
     if os.path.exists(gbsbuild_dic['build_root']):
@@ -180,11 +184,13 @@ def main():
     print 'OBS Project: %s, repository: %s, architecture: %s gbs build start'\
           %(gbsbuild_dic['obs_prj'],repository,architecture)
     (repo_init_sync_status,repo_init_sync_status_reason) = gbsbuild_sub._do_repo_init_sync(repository)
+    print repo_init_sync_status
+    print repo_init_sync_status_reason
     # if repo init or repo sync failed, do not perform gbs build, and just update dashboard
     if repo_init_sync_status == 'F':
         if buildmonitor_enabled:
            update_db_for_gbsbuild_target(repository,architecture,bm_start_datetime,\
-                                          repo_init_sync_status,gbsbuild_dic['gbsbuild_tag'])
+                                          repo_init_sync_status,gbsbuild_dic['gbsbuild_tag'], status_reason=repo_init_sync_status_reason)
         return 0
 
     (gbs_build_status,gbs_build_status_reason) = gbsbuild_sub._do_repo_arch_gbs_fullbuild(repository,architecture)
@@ -195,7 +201,7 @@ def main():
 
     if buildmonitor_enabled:
         update_db_for_gbsbuild_target(repository,architecture,bm_start_datetime,\
-                                      gbs_build_status,gbsbuild_dic['gbsbuild_tag'])
+                                      gbs_build_status,gbsbuild_dic['gbsbuild_tag'], status_reason = gbs_build_status_reason)
 
         bm_pkg_info=gbsbuild_sub.get_rpm_info_from_gbs_build_log(live_out_dir,repository,architecture)
         print "bm_pkg_info: %s" %bm_pkg_info
diff --git a/job_gbsdbbuild_update_meta.py b/job_gbsdbbuild_update_meta.py
new file mode 100755
index 0000000..f2cbbfd
--- /dev/null
+++ b/job_gbsdbbuild_update_meta.py
@@ -0,0 +1,599 @@
+#!/usr/bin/env python
+
+import sys
+import os
+import tempfile
+import atexit
+import shutil
+import urllib2
+import gzip
+import re
+from common.git import Git, clone_gitproject
+from common.gerrit import Gerrit, get_gerrit_event, GerritError, is_ref_deleted
+from common.buildtrigger import trigger_info, trigger_next
+
+# prepare related global variables
+workspace = os.getenv('WORKSPACE')
+basic_url= os.getenv('URL_PUBLIC_REPO_BASE') + '/snapshots/tizen/'
+public_basic_url = 'http://download.tizen.org/snapshots/tizen/'
+daily_release_url= os.getenv('URL_PUBLIC_REPO_BASE') + '/releases/daily/tizen/'
+public_daily_release_url = 'http://download.tizen.org/releases/daily/tizen/'
+gbs_meta_default_profile = os.getenv('GBS_META_DEFAULT_PROFILE')
+gbs_meta_support_tizen_ver = os.getenv('GBS_META_SUPPORT_TIZEN_VER')
+#GIT_REF_MAPPING_BRANCH = 'sandbox/soong9/unified'
+GIT_REF_MAPPING_BRANCH = 'master'
+
+def __get_index_from_path_prj_arch(path_prj_arch):
+    """ Get base repo url from obs project & repository"""
+    print "-------__get_index_from_path_prj_arch start-----------------"
+
+    base_repo = {}
+    obs_prj=path_prj_arch.split('##')[0]
+    path_arch=path_prj_arch.split('##')[1]
+    path_prj=obs_prj.replace('Tizen:','').replace(':','-').lower()
+
+    if obs_prj.find(':ref:') != -1:
+        ref_build_id=obs_prj.split(':ref:')[1]
+        obs_prj=obs_prj.split(':ref:')[0]
+        snapshot_id=obs_prj.replace('Tizen:','').replace(':','-').lower()
+        temp_id = '/'+snapshot_id+'/tizen-'+snapshot_id+'_'+ref_build_id+'/repos/'+path_arch
+    else:
+        snapshot_id=path_prj
+        temp_id = '/'+snapshot_id+'/latest/repos/'+path_arch
+
+    url_candidates = []
+    url_candidates.append(basic_url+temp_id)
+    url_candidates.append(daily_release_url+temp_id)
+    url_candidates.append(public_basic_url+temp_id)
+    url_candidates.append(public_daily_release_url+temp_id)
+    for url in url_candidates:
+        try:
+            urllib2.urlopen(url)
+        except:
+            continue
+        else:
+            base_url = url
+            break
+
+    print 'obs_prj: %s, snapshot_id: %s' %(obs_prj,snapshot_id)
+    print 'path_prj: %s, path_arch: %s, base_url: %s' %(path_prj,path_arch,base_url)
+
+    base_repo['repo_name']='repo.'+path_prj+'_'+path_arch
+    base_repo['debug_repo_name']='repo.'+path_prj+'_'+path_arch+'_debug'
+    base_repo['repo_url']=base_url+'/packages/'
+    base_repo['debug_repo_url']=base_url+'/debug/'
+
+    print 'base_repo: %s' %base_repo
+    return base_repo
+
+
+def _update_ref_bin_index(ref_binary):
+    """ Update Reference Binary Index"""
+    print "-----------------------------------------------------"
+    print "Update Reference Binary Index"
+    print "-----------------------------------------------------"
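+    # ref_binary maps each OBS project to its reference snapshot id string
+    # parsed from git-ref-mapping.xml (values end in ':ref:<snapshot_number>' or ':ref:latest')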
+
+    ref_list = []
+
+    # prepare separate temp directory for each build
+    git_prj = 'scm/meta/obs'
+    git_branch="master"
+    tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+    atexit.register(shutil.rmtree, tmpdir)
+    prjdir = os.path.join(tmpdir, git_prj)
+
+    # clone gerrit project to local dir
+    if not clone_gitproject(git_prj, prjdir):
+        print >> sys.stderr, 'Error cloning %s' %git_prj
+        return 1
+    mygit = Git(prjdir)
+    mygit.checkout(git_branch)
+
+    for obs_prj in ref_binary.keys():
+        print '\nobs_prj: %s' %obs_prj
+
+        if len(re.findall('\D', obs_prj.split(':')[1].replace('.',''))) != 0:
+            t_ver = ''
+            t_ver_path = '/'
+            profile_array = obs_prj.split(':')[1:]
+        else:
+            t_ver = obs_prj.split(':')[1] + '-'
+            t_ver_path = '/' + obs_prj.split(':')[1] + '/'
+            profile_array = obs_prj.split(':')[2:]
+        #find profile name
+        i=0
+        profile = ''
+        while i < len(profile_array):
+            profile += profile_array[i].lower() + '-'
+            i += 1
+        profile = profile.rstrip('-')
+
+        print 'psk-test'
+        print obs_prj
+        print ref_binary
+        ref_snapshot_number = ref_binary[obs_prj].split('ref:')[1]
+        if ref_snapshot_number == 'latest':
+            ref_snapshot_url = basic_url + t_ver + profile + "/" + ref_snapshot_number + "/"
+        else:
+            ref_snapshot_url = basic_url + t_ver + profile + "/" + "tizen-" + t_ver + profile + "_" + ref_snapshot_number + "/"
+        try:
+            urllib2.urlopen(ref_snapshot_url)
+        except:
+            ref_snapshot_url = daily_release_url + t_ver + profile + "/" + "tizen-" + t_ver + profile + "_" + ref_snapshot_number + "/"
+
+        repository = []
+        path_prj_list= {}
+        obs_meta_file = prjdir + '/' + obs_prj + '/_meta'
+        lines = open(obs_meta_file).readlines()
+        for line in lines:
+            if line.find('repository name=') != -1:
+                repo_tmp=line.split('"')[1]
+                repository.append(repo_tmp)
+                path_prj_list[repo_tmp] = []
+            if line.find('path project=') != -1:
+                path_prj_arch = line.split('"')[1]+'##'+line.split('"')[3]
+                path_prj_list[repo_tmp].append(path_prj_arch)
+
+        ref_list.append({'profile':profile})
+        ref_list[-1]['t_ver']=t_ver
+        ref_list[-1]['t_ver_path']=t_ver_path
+        ref_list[-1]['ref_snapshot_number'] = ref_snapshot_number
+        ref_list[-1]['ref_snapshot_url'] = ref_snapshot_url
+        ref_list[-1]['repository'] = repository
+        ref_list[-1]['path_prj_list'] = path_prj_list
+
+    print 'reference list %s' %ref_list
+    return ref_list
+
+
+def _update_base_prj_index():
+    """ Update Tizen Base Project Index"""
+    print "-----------------------------------------------------"
+    print "Update Tizen Base Project Index"
+    print "-----------------------------------------------------"
+
+    base_prj_list = []
+
+    # prepare separate temp directory for each build
+    git_prj = 'scm/meta/obs'
+    git_branch="master"
+    tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+    atexit.register(shutil.rmtree, tmpdir)
+    prjdir = os.path.join(tmpdir, git_prj)
+
+    # clone gerrit project to local dir
+    if not clone_gitproject(git_prj, prjdir):
+        print >> sys.stderr, 'Error cloning %s' %git_prj
+        return 1
+    mygit = Git(prjdir)
+    mygit.checkout(git_branch)
+
+    latest_t_ver = gbs_meta_support_tizen_ver.split(' ')[0]
+    for ver in gbs_meta_support_tizen_ver.split(' '):
+        if ver > latest_t_ver:
+            latest_t_ver = ver
+
+    for base_ver in gbs_meta_support_tizen_ver.split(' '):
+        if base_ver == latest_t_ver:
+            obs_prj = 'Tizen:Base'
+            t_ver = ''
+            git_branch = 'tizen'
+        else:
+            obs_prj = 'Tizen:'+base_ver+':Base'
+            t_ver = base_ver+'-'
+            git_branch = 'tizen_'+base_ver
+
+        repository = []
+        obs_meta_file = prjdir + '/' + obs_prj + '/_meta'
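+        # pick up the repository names declared in the Base project _meta file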
+        lines = open(obs_meta_file).readlines()
+        for line in lines:
+            if line.find('repository name=') != -1:
+                repo_tmp=line.split('"')[1]
+                repository.append(repo_tmp)
+
+        base_prj_list.append({'base_ver':base_ver})
+        base_prj_list[-1]['t_ver']=t_ver
+        base_prj_list[-1]['repository'] = repository
+        base_prj_list[-1]['git_branch'] = git_branch
+
+    print 'base_prj_list = %s' %base_prj_list
+    return base_prj_list
+
+
+def _update_build_conf(ref_list,commit_msg,is_staging):
+    """ Update scm/meta/build-config after ref.binary update"""
+    print "-----------------------------------------------------"
+    print "Update scm/meta/build-config after ref.binary update"
+    print "-----------------------------------------------------"
+
+    # prepare separate temp directory for each build
+    git_prj = 'scm/meta/build-config'
+    git_branch="tizen"+is_staging
+
+    tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+    atexit.register(shutil.rmtree, tmpdir)
+    prjdir = os.path.join(tmpdir, git_prj)
+
+    # clone gerrit project to local dir
+    if not clone_gitproject(git_prj, prjdir):
+        print >> sys.stderr, 'Error cloning %s' %git_prj
+        return 1
+    mygit = Git(prjdir)
+    mygit.checkout(git_branch)
+
+    # Update build.conf file
+    for ref_profile in ref_list:
+        if not os.path.exists(prjdir + ref_profile['t_ver_path'] + ref_profile['profile']):
+            print 'No dir exists for %s. Create it' %(ref_profile['t_ver_path'] + ref_profile['profile'])
+            os.makedirs(prjdir + ref_profile['t_ver_path'] + ref_profile['profile'])
+
+        for repository in ref_profile['repository']:
+            repo_build_conf_file = prjdir + ref_profile['t_ver_path'] + ref_profile['profile'] + '/' + repository + '_build.conf'
+            repodata_url = ref_profile['ref_snapshot_url'] + "repos/" + repository + "/packages/repodata/"
+            print 'profile: %s, repository:%s' %(ref_profile['profile'], repository)
+            print 'repodata_url : %s' %repodata_url
+            # find url of build.conf.gz file
+            res = urllib2.urlopen(repodata_url)
+            for line in res.read().split('<a href="'):
+                if line.find('build.conf.gz') != -1:
+                    build_conf_file=line.split('"')[0]
+            build_conf_url = repodata_url + build_conf_file
+            print 'build_conf_url: %s' %build_conf_url
+            # download xxxx.build.conf.gz file -> decompress -> save it to repo_build.conf inside scm/meta/build-config git
+            tmp_build_conf_file = tmpdir + '/tmp_build.conf.gz'
+            f = open(tmp_build_conf_file,'wb')
+            res2 = urllib2.urlopen(build_conf_url)
+            f.write(res2.read())
+            f.close()
+            with gzip.GzipFile(tmp_build_conf_file, 'rb') as inF:
+                with file(repo_build_conf_file, 'wb') as outF:
+                    s = inF.read()
+                    outF.write(s)
+
+    if mygit.is_clean()[0] == 0:
+        print '%s, branch %s has some changes' %(git_prj,git_branch)
+        mygit.add_files(prjdir)
+        mygit.commit_all(commit_msg)
+        mygit.push(repo = 'origin', src = git_branch)
+    else:
+        print '%s, branch %s has nothing to commit' %(git_prj,git_branch)
+
+
+def _update_gbs_conf(ref_list,commit_msg,is_staging,is_fullbuild):
+    """ Update scm/meta/gbs-config after ref.binary update"""
+    print "-----------------------------------------------------"
+    print "Update scm/meta/gbs-config after ref.binary update"
+    print "-----------------------------------------------------"
+
+    # prepare separate temp directory for each build
+    git_prj = 'scm/meta/gbs-config'
+    if is_staging == '_staging' and is_fullbuild == 'false':
+        git_branch="tizen_ref"
+    else:
+        git_branch="tizen"+is_staging
+
+    tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+    atexit.register(shutil.rmtree, tmpdir)
+    prjdir = os.path.join(tmpdir, git_prj)
+
+    # clone gerrit project to local dir
+    if not
 clone_gitproject(git_prj, prjdir):
+        print >> sys.stderr, 'Error cloning %s' %git_prj
+        return 1
+    mygit = Git(prjdir)
+    mygit.checkout(git_branch)
+
+    gbs_conf_file = prjdir+'/gbs.conf'
+    default_profile = ref_list[0]['profile']
+    default_repo = ref_list[0]['repository'][0]
+    default_t_ver = ref_list[0]['t_ver']
+
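+    # gbs.conf layout: [general] default profile, one [profile.*] section per
+    # repository, then [repo.*] url sections for base and reference snapshots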
+    #[general] section
+    gbs_conf_text = '[general]\nfallback_to_native = true\nprofile = profile.'+default_t_ver+default_profile+'_'+default_repo+'\n\n\n'
+
+    #Profile section
+    gbs_conf_text += '#########################################################\n################## Profile Section ##################\n#########################################################\n\n'
+
+    path_prj_arch_overall_list = []
+    for ref_profile in ref_list:
+        profile = ref_profile['profile']
+        t_ver = ref_profile['t_ver']
+        t_ver_path = ref_profile['t_ver_path']
+        gbs_conf_text = gbs_conf_text + '############# '+ t_ver+profile+' #############\n'
+        for repository in ref_profile['repository']:
+            gbs_conf_text += '[profile.'+t_ver+profile+'_'+repository+']\n'
+            if is_fullbuild == 'true':
+                gbs_conf_text += 'buildconf=./scm/meta/build-config'\
+                                 +t_ver_path+profile+'/'+repository+'_build.conf\n'
+            gbs_conf_text += 'repos = '
+            for path_prj_arch in ref_profile['path_prj_list'][repository]:
+                path_prj_arch_overall_list.append(path_prj_arch)
+                base_repo = __get_index_from_path_prj_arch(path_prj_arch)
+                gbs_conf_text += base_repo['repo_name']+','+base_repo['debug_repo_name']+','
+            if is_fullbuild == 'false':
+                gbs_conf_text += 'repo.'+t_ver+profile+'_'+repository+','
+                gbs_conf_text += 'repo.'+t_ver+profile+'_'+repository+'_debug,'
+            gbs_conf_text = gbs_conf_text[:-1]
+
+            gbs_conf_text += '\n\n'
+
+    #repo section
+    gbs_conf_text += '\n\n\n#########################################################\n################## Repo Section ##################\n#########################################################\n'
+
+    path_prj_arch_overall_list = list(set(path_prj_arch_overall_list))
+    #base repo
+    gbs_conf_text += '\n############# base #############\n'
+
+    for path_prj_arch in path_prj_arch_overall_list:
+        base_repo = __get_index_from_path_prj_arch(path_prj_arch)
+        gbs_conf_text += '['+base_repo['repo_name']+']\n'
+        gbs_conf_text += 'url = '+base_repo['repo_url']+'\n'
+        gbs_conf_text += '['+base_repo['debug_repo_name']+']\n'
+        gbs_conf_text += 'url = '+base_repo['debug_repo_url']+'\n\n'
+
+    #profile repo
+    for ref_profile in ref_list:
+        profile = ref_profile['profile']
+        t_ver = ref_profile['t_ver']
+        ref_snapshot_url = ref_profile['ref_snapshot_url']
+
+        gbs_conf_text += '\n############# '+ t_ver+profile+' #############\n'
+        for repository in ref_profile['repository']:
+            gbs_conf_text += '[repo.'+t_ver+profile+'_'+repository+']\n'
+            gbs_conf_text += 'url = '+ref_snapshot_url+'repos/'+repository+'/packages/\n'
+            gbs_conf_text += '[repo.'+t_ver+profile+'_'+repository+'_debug]\n'
+            gbs_conf_text += 'url = '+ref_snapshot_url+'repos/'+repository+'/debug/\n\n'
+
+    #write gbs_conf_text to default gbs_conf_file, which is scm/meta/gbs-config/gbs.conf
+    print '==================================================\n=========== default gbs.conf file==========\n==================================================\n'
+    print gbs_conf_text
+    with open(gbs_conf_file,"wb") as f:
+        f.write(gbs_conf_text)
+
+    #Copy default gbs.conf file to each version/profile/repository_gbs.conf file and replace default profile
+    for ref_profile in ref_list:
+        profile = ref_profile['profile']
+        t_ver = ref_profile['t_ver']
+        t_ver_path = ref_profile['t_ver_path']
+
+        if not os.path.exists(prjdir + t_ver_path + profile):
+            print 'No dir exists for %s. Create it' %(t_ver_path + profile)
+            os.makedirs(prjdir + t_ver_path + profile)
+
+        for repository in ref_profile['repository']:
+            file_path_name = prjdir + t_ver_path + profile + '/' + repository + '_gbs.conf'
+            print 'filename and path: %s' %file_path_name
+            shutil.copy(gbs_conf_file,file_path_name)
+            old_profile='profile = profile.'+default_t_ver+default_profile+'_'+default_repo
+            new_profile='profile = profile.'+t_ver+profile+'_'+repository
+            print 'new_profile: %s' %new_profile
+            with open(file_path_name,"rb") as f:
+                newText = f.read().replace(old_profile,new_profile)
+            with open(file_path_name,"wb") as f:
+                f.write(newText)
+
+    #git push if there are any changes
+    if mygit.is_clean()[0] == 0:
+        print '%s, branch %s has some changes' %(git_prj,git_branch)
+        mygit.add_files(prjdir)
+        mygit.commit_all(commit_msg)
+        mygit.push(repo = 'origin', src = git_branch)
+    else:
+        print '%s, branch %s has nothing to commit' %(git_prj,git_branch)
+
+
+def _update_scm_manifest(ref_list,base_prj_list,commit_msg,is_staging):
+    """ Update scm/manifest after ref.binary update"""
+    print "-----------------------------------------------------"
+    print "\n\n\nUpdate scm/manifest after ref.binary update"
+    print "-----------------------------------------------------"
+
+    # prepare separate temp directory for each build
+    git_prj = 'scm/manifest'
+
+    for base_prj in base_prj_list:
+        tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+        atexit.register(shutil.rmtree, tmpdir)
+        prjdir = os.path.join(tmpdir, git_prj)
+
+        # clone gerrit project to local dir
+        if not clone_gitproject(git_prj, prjdir):
+            print >> sys.stderr, 'Error cloning %s' %git_prj
+            return 1
+        mygit = Git(prjdir)
+
+        git_branch = base_prj['git_branch']+is_staging
+
+        print 'git_prj: %s git_branch: %s' %(git_prj, git_branch)
+        mygit.checkout(git_branch)
+
+        #_remote.xml
+        remote_file = prjdir + '/_remote.xml'
+        gerrit_hostname = os.getenv('GERRIT_HOSTNAME_EXTERNAL')
+        remote_text = '<?xml version="1.0" encoding="UTF-8"?>\n<manifest>\n  <remote fetch="ssh://'+gerrit_hostname+'" name="tizen-gerrit"/>\n</manifest>\n'
+        with open(remote_file,'wb') as f:
+            f.write(remote_text)
+
+        for ref_profile in ref_list:
+            if ref_profile['t_ver'] == base_prj['t_ver']:
+                profile = ref_profile['profile']
+                t_ver = ref_profile['t_ver']
+                t_ver_path = ref_profile['t_ver_path']
+                ref_snapshot_number = ref_profile['ref_snapshot_number']
+                ref_snapshot_url = ref_profile['ref_snapshot_url']
+
+                profile_path = os.path.join(prjdir,profile)
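+                #write projects.xml, metadata.xml and a top-level
+                #profile_repository.xml manifest for each repository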
+                for repository in ref_profile['repository']:
+                    if not os.path.exists(prjdir+'/'+profile+'/'+repository):
+                        print 'No dir exists for %s. Create it' %(profile+'/'+repository)
+                        os.makedirs(prjdir+'/'+profile+'/'+repository)
+
+                    #projects.xml
+                    projects_file = profile_path + '/' + repository + '/projects.xml'
+
+                    manifest_url = ref_snapshot_url + '/builddata/manifest/'
+                    res = urllib2.urlopen(manifest_url)
+                    for index in res.read().split('<a href="'):
+                        if index.find(repository+'.xml') != -1:
+                            manifest_file=index.split('">')[0]
+                    manifest_file_url = manifest_url + manifest_file
+                    print 'projects_file: %s' %projects_file
+                    print 'manifest_file_url: %s' %manifest_file_url
+
+                    with open(projects_file,'wb') as f:
+                        res2 = urllib2.urlopen(manifest_file_url)
+                        f.write(res2.read())
+
+                    #metadata.xml
+                    revision_branch='tizen'+is_staging
+                    metadata_file = profile_path + '/' + repository + '/metadata.xml'
+                    metadata_text = '<?xml version="1.0" encoding="UTF-8"?>\n<manifest>\n  <default remote="tizen-gerrit" revision="'+revision_branch+'"/>\n</manifest>\n'
+                    print 'repository: %s' %repository
+                    print 'metadata_text: \n%s' %metadata_text
+                    with open(metadata_file,'wb') as f:
+                        f.write(metadata_text)
+
+                    #profile_repository.xml
+                    manifest_xml_file = prjdir+'/'+profile+'_'+repository+'.xml'
+                    manifest_text = '<?xml version="1.0" encoding="UTF-8"?>\n<manifest>\n  <include name="_remote.xml"/>\n  <include name="'+profile+'/'+repository+'/projects.xml"/>\n  <include name="'+profile+'/'+repository+'/metadata.xml"/>\n</manifest>\n'
+                    print 'repository: %s' %repository
+                    print 'manifest_text: \n%s' %manifest_text
+
+                    with open(manifest_xml_file,'wb') as f:
+                        f.write(manifest_text)
+
+        if mygit.is_clean()[0] == 0:
+            print '%s, branch %s has some changes' %(git_prj,git_branch)
+            mygit.add_files(prjdir)
+            mygit.commit_all(commit_msg)
+            mygit.push(repo = 'origin', src = git_branch)
+        else:
+            print '%s, branch %s has nothing to commit' %(git_prj,git_branch)
+
+
+def prepare_ref_binary():
+    """ prepare_ref_binary_by_git"""
+    print "----------prepare_ref_binary_by_git start--------"
+
+    git_prj = 'scm/git-ref-mapping'
+    git_branch = GIT_REF_MAPPING_BRANCH
+    filename = 'git-ref-mapping.xml'
+    tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+    atexit.register(shutil.rmtree, tmpdir)
+    prjdir = os.path.join(tmpdir, git_prj)
+
+    # clone gerrit project to local dir
+    if not clone_gitproject(git_prj, prjdir):
+        print >> sys.stderr, 'Error cloning %s' %git_prj
+        return 1
+    mygit = Git(prjdir)
+    mygit.checkout(git_branch)
+    commit_msg=mygit.show(git_branch).split('\n')[4].replace("    ","")
+    commit_log=mygit.show(git_branch)
+
+    ref_binary = {}
+    base_prj_list = {}
+    build_profile_list = []
+    build_profile_snapshot_id_list = []
+
+    lines = open(prjdir+'/'+filename).readlines()
+    for line in lines:
+        if line.find('branch OBS_project') != -1:
+            ref_binary[line.split('"')[1]] = line.split('"')[3]
+
+    #Add default profile supported by gbs
+    if gbs_meta_default_profile:
+        for profile in gbs_meta_default_profile.split(' '):
+            ref_binary[profile]=profile+':ref:latest'
+
+    for line in commit_log.splitlines():
+        if line.find('+') != -1 and line.find('branch OBS_project') != -1:
+            obs_prj=line.split('"')[1]
+            if obs_prj in build_profile_list:
+                continue
+            build_profile_list.append(obs_prj)
+            snapshot_num=line.split('"')[3].split(':')[-1]
+            snapshot_id=obs_prj.replace(':','-').lower()+'_'+snapshot_num
+            build_profile_snapshot_id_list.append({'obs_prj':obs_prj,'snapshot_id':snapshot_id})
+
+    print 'gbs fullbuild target profiles are :\n%s' %build_profile_snapshot_id_list
+
+    return (ref_binary,commit_msg,build_profile_snapshot_id_list)
+
+
+def main():
+    """script entry point"""
+
+    print '---[JOB STARTED]----------------------------------------'
+
+    git_info = {}
+    #this job is normally triggered by a gerrit event
+    event = get_gerrit_event()
+#    event = trigger_info(os.getenv("TRIGGER_INFO"))
+
+    # prepare separate temp directory for each build
+
+    # If this job is triggered by a gerrit event, first submit meta files to the staging branch
+    # Otherwise, submit gbs meta files to the original branch
+    if 'project' in event and 'event_type' in event:
+        print 'update-gbs-meta job is triggered by gerrit event'
+        is_staging = '_staging'
+        if event['event_type'] != "ref-updated" or event['project'] != "scm/git-ref-mapping":
+            # This is just a sanity check as ref-updated is the only event we
+            # react on and it's configured in the job configuration
+            print >> sys.stderr, "Configuration error: This job can't process "\
+                                 "project %s! Only scm/git-ref-mapping is allowed and "\
+                                 "event %s! Only ref-updated events are allowed" \
+                                 %(event['project'], event['event_type'])
+            return 1
+        if event['refname'] != GIT_REF_MAPPING_BRANCH:
+            print "Configuration error: git branch of scm/git-ref-mapping mismatched!!"
+            return 1
+    else:
+        print 'gbsdbbuild-update-meta job is triggered by other jenkins job\n\
+               gbsdbbuild-create-snapshot'
+        content = trigger_info(os.getenv("TRIGGER_INFO"))
+        is_staging = ''
+        if content['triggered_from'] != 'gbsdbbuild_create_snapshot':
+            print 'Trigger error: gbsdbbuild-update-meta job is not triggered by gbsdbbuild-create-snapshot!!'
+            return 1
+
+    (ref_binary,commit_msg,build_profile_snapshot_id_list) = prepare_ref_binary()
+
+    print "commit_msg='%s'" %commit_msg
+    print 'Reference snapshot numbers are as below'
+    print ref_binary
+
+    ref_list = _update_ref_bin_index(ref_binary)
+    base_prj_list = _update_base_prj_index()
+    _update_build_conf(ref_list,commit_msg,is_staging)
+    _update_gbs_conf(ref_list,commit_msg,is_staging,is_fullbuild='true')
+    # Update tizen_ref branch in case of _staging only
+    # tizen_ref branch : can be used by Tizen developers who want to do
+    # gbs builds (not full builds) against the reference snapshot remote repo
+    # scm/git-ref-mapping update -> scm/gbs-config _staging branch & tizen_ref branch update
+    if is_staging == '_staging':
+        _update_gbs_conf(ref_list,commit_msg,is_staging,is_fullbuild='false')
+    _update_scm_manifest(ref_list,base_prj_list,commit_msg,is_staging)
+
+    if is_staging == '_staging':
+        for build_profile_snapshot_id in build_profile_snapshot_id_list:
+            trigger_next("gbs_build_dispatcher",\
+                         {"menu" : "Ref Fullbuild",
+                          'build_profile_snapshot_id': build_profile_snapshot_id})
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/packaging/jenkins-scripts.spec b/packaging/jenkins-scripts.spec
index 8b50239..d110dc5 100644
--- a/packaging/jenkins-scripts.spec
+++ b/packaging/jenkins-scripts.spec
@@ -185,6 +185,7 @@ fi
 %{destdir}/job_gbs_dashboard_build.py
 %{destdir}/job_gbsdbbuild_create_snapshot.py
 %{destdir}/job_gbsdbbuild_one_repoarch_build.py
+%{destdir}/job_gbsdbbuild_update_meta.py
 
 %files common
 %defattr(-,jenkins,jenkins)
-- 
2.7.4