From: jintae.son Date: Wed, 18 Sep 2019 07:14:24 +0000 (+0900) Subject: ADD Base Verification scripts X-Git-Tag: accepted/tizen/devbase/services/20200104.053715~45 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=5323c8b521a235f3165c484acef45059414b2a85;p=services%2Fjenkins-scripts.git ADD Base Verification scripts - applied review patch Change-Id: I6cbd19b4bd5dce1f3eb5235ee40a9c8ecdb7afe5 --- diff --git a/job_base_verification.py b/job_base_verification.py new file mode 100644 index 0000000..f94994b --- /dev/null +++ b/job_base_verification.py @@ -0,0 +1,382 @@ +import os +import re +import sys +import base64 +import json +import requests +import urlparse +import ast +from time import sleep +from datetime import datetime +from datetime import timedelta + +import urllib2 +import copy + +from osc import conf, core +from common.buildservice import BuildService +from common.buildtrigger import trigger_info, trigger_next, remote_jenkins_build_job +from gitbuildsys.errors import ObsError +import xml.etree.ElementTree as ET +import xml.etree.cElementTree as ElementTree +from common.mapping import get_ref_map, get_sync_map_list +from common.gerrit import GerritEnv + + +class LocalError(Exception): + """Local error exception.""" + pass + + +class CreateProjectObs(object): + """ create project to obs + PreRelease Project fotmat is : + home:prerelease::submit:basechecker """ + + def post_project_create(self, trigger_info, project_name): + content = trigger_info + + bm_start_datetime = datetime.now() + # get vars + bm_stage = "Sync_SR_Submit_BaseCheck" + bm_sync_git_tag = content.get('virtual_sr') + bm_end_datetime = datetime.now() + triggered_by = content.get('submitter') + project_to_create = project_name + bm_data = {"bm_stage": bm_stage, + "commit_date": str(bm_end_datetime), + "commit_msg": "N/A", + "submit_date": str(bm_end_datetime), + "submit_msg": "N/A", + "submitter": "<%s>" % triggered_by, + # "bm_member_sr_info" : {project_to_create: requests_item[project_to_create][r:(r+split_step)]}, + "bm_sync_git_tag": bm_sync_git_tag, + "gerrit_project": "N/A", + "gerrit_newrev": "N/A", + "gerrit_account_name": "<%s>" % triggered_by, + "bm_start_datetime": str(bm_start_datetime), + "bm_end_datetime": str(bm_end_datetime), + "bm_src_project_lst": [project_to_create], + "git_tag_list": content.get('sr_list') + } + trigger_next("BUILD-MONITOR", bm_data) + + def change_project_config(self, build, base_project, profile_project, git_tag): + """ + Change release name from project config in OBS + Add the datetime into release name. + Eg: 'Release: .' ----> 'Release: 20141010..' 
+ """ + # get project config + base_config = build.get_project_config(base_project) + profile_config = build.get_project_config(profile_project) + config = base_config + '%s\n' % profile_config + + release_name = 'Release: %s' % (git_tag.split(':')[-1]) + res = re.findall(r'^Release: ?\S+$', config, flags=re.MULTILINE) + if res: + if git_tag.split(':')[-1] not in res[0]: + note = '#Insert time from submission into release name\n' + release_name = '%s.%s' % (release_name, + res[0].split('Release:')[1].strip()) + config = config.replace( + res[0], '%s%s' % (note, release_name), 1) + else: + note = '#Add release name into prjconf\n' + config = note + '%s\n' % release_name + config + + # Add rpmbuild stage option + if os.getenv('PRERELEASE_RPMBUILD_STAGE'): + # Check if we've got required fields in TRIGGER_INFO + if not os.getenv('PRERELEASE_RPMBUILD_STAGE') in ('ba', 'bb'): + print 'Error: PRERELEASE_RPMBUILD_STAGE %s' % (os.getenv('PRERELEASE_RPMBUILD_STAGE')) + else: + rpmbuildstage = 'Rpmbuildstage: %s' % ( + os.getenv('PRERELEASE_RPMBUILD_STAGE')) + res = re.findall(r'^Rpmbuildstage: ?\S+$', + config, flags=re.MULTILINE) + if res: + config = config.replace(res[0], '%s' % (rpmbuildstage), 1) + else: + config = config + '#Add RpmbuildStage option into prjconf\n' + \ + '%s\n' % (rpmbuildstage) + + # Add "CopyLinkedPackages: yes" for prerelease projects. + if not re.search("CopyLinkedPackages:", config): + config = config + "\nCopyLinkedPackages: yes\n" + + return config + + def run_create_staging_base_project_obs(self, obs_api, obs_user, obs_passwd, remote_obs_api, remote_obs_user, remote_obs_passwd): + print( + "-------------------------[JOB STARTED : RUN CREATE STAGING BASE]------------------------------------") + + remote_build = BuildService( + remote_obs_api, remote_obs_user, remote_obs_passwd) + build = BuildService(obs_api, obs_user, obs_passwd) + + content = trigger_info(os.getenv('TRIGGER_INFO')) + project_type = "home:prerelease" + ref_name = "%s:%s" % (project_type, content.get("obs_target_prj")) + tag_name = content.get("virtual_sr").replace("/", ":") + base_ref_project = content.get('ref_base_prj') + source_project = content.get('ref_profile_prj') + + meta = build.get_meta(source_project) + config = self.change_project_config( + build, base_ref_project, source_project, tag_name) + project_name = "%s:%s" % (ref_name, tag_name) + + self.create_basechecker_project( + build, project_name, content, meta=meta, config=config, baserefproject=base_ref_project, linked='all') + + request_submits = self.sort_by_request_submit( + content.get("request_sr"), "project") + + self.copy_request_submits_package(build, project_name, request_submits) + + for idx, val in enumerate(request_submits): + if val and request_submits[idx]: + self.create_link_aggregate_package( + build, base_ref_project, project_name, request_submits[idx]) + + self.post_project_create(content, project_name) + + def create_basechecker_project(self, build, target, info, meta=None, config=None, baserefproject=None, linked=''): + """ + create project + """ + try: + if not build.exists(target): + try: + build.create_project( + target, None, description=json.dumps(info)) + except ObsError, error: + raise LocalError( + "Unable to create project %s: %s" % (target, error)) + + if meta: + # set meta + xml_meta = ElementTree.fromstringlist(meta) + # change the target project name + xml_meta.set('name', target) + # delete remote person + for person_element in xml_meta.findall('person'): + xml_meta.remove(person_element) + + # delete link 
project + for link in xml_meta.findall('link'): + xml_meta.remove(link) + + # replace + if baserefproject: + for repo_element in xml_meta.findall('repository'): + if linked: + repo_element.set('linkedbuild', linked) + for element in repo_element.findall('path'): + element.set('project', baserefproject) + + # add link project + element = ElementTree.Element( + 'link', {"project": "%s" % (baserefproject)}) + xml_meta.append(element) + # add target person + element = ElementTree.Element('person', {"userid": "%s" % ( + os.getenv("OBS_API_USERNAME")), "role": "maintainer"}) + xml_meta.append(element) + build.set_meta(ElementTree.tostring(xml_meta), target) + # set project config + build.set_project_config(target, config) + + # disable publish flag + build.disable_build_flag( + target, repo=None, flag="publish", status="disable") + # disable build flag + build.disable_build_flag( + target, repo=None, flag="build", status="disable") + print "\nTarget project %s created" % (target) + return True + else: + print "\nTarget project %s exist" % (target) + return False + except ObsError, error: + raise LocalError("Unable to create project %s: %s" % + (target, error)) + + def sort_by_request_submit(self, request_submits, key): + return sorted(request_submits, key=lambda submit: (submit[key])) + + def copy_request_submits_package(self, build, project_name, request_submits): + for submit in request_submits: + src_project = submit['project'] + for package in submit['packages'].split(','): + build.create_copy_pac( + src_project, package, project_name, package) + + def add_multi_link_project(self, build, project, link_project): + """ add link project """ + path = core.quote_plus(project) + kind = 'prj' + data = core.meta_exists( + metatype=kind, path_args=path, template_args=None, create_new=False) + if not data: + return + + root = ElementTree.fromstring(''.join(data)) + root.insert(3, ElementTree.Element('link', project=link_project)) + + res_data = ElementTree.tostring(root) + build.set_meta(res_data, project) + + def create_link_aggregate_package(self, build, src_project, dst_project, target_package): + sourceinfo = build.get_sourceinfo_list(src_project) + for package in sourceinfo: + if sourceinfo[package]: + link_prj, link_pkg = sourceinfo[package][-1].split('/') + if link_prj == src_project and link_pkg == target_package: + build.create_link_pac( + dst_project, target_package, dst_project, package) + + if re.search("_aggregate", package): + print("Copypac aggregate package: %s/%s" % + (dst_project, package)) + build.create_copy_pac( + src_project, package, dst_project, package) + aggregate_file_name = "_aggregate" + build.get_source_file( + src_project, package, aggregate_file_name) + content = "" + with open(aggregate_file_name, 'r') as f: + content = f.read() + + if not re.search("qemu_aggregate", package) and not re.search("java-1_6_0-sun_aggregate", package) and not re.search("jpackage-utils_aggregate", package): + content_xml_root = ElementTree.fromstringlist(content) + for element in content_xml_root.findall('aggregate'): + element.set('project', dst_project) + content = ElementTree.tostring(content_xml_root) + with open(aggregate_file_name, 'w') as f: + f.write(content) + commit_msg = "uploaded to copy pac %s/%s from %s" % ( + dst_project, package, src_project) + try: + build.commit_files(dst_project, package, [ + (aggregate_file_name, True)], commit_msg) + except ObsError, error: + raise UploadError( + "Unable to upload _aggregate to %s: %s" % (dst_project, error)) + + print("aggregate Copy 
Done.") + + def check_build_status(self, build, project): + + sleep(30) + status = build.getbuildstatus(project) + + for repo in status.get('buildstatus'): + if repo.get('code') != 'succeeded': + return False + + return True + + def add_linked_project_profile(self, obs_api, obs_user, obs_passwd): + build = BuildService(obs_api, obs_user, obs_passwd) + content = trigger_info(os.getenv("TRIGGER_INFO")) + project = content.get("project") or content.get('sourceproject') + + disc = build.get_description(project) + acceptable_string_disc = disc.replace("'", "\"") + info = json.loads(acceptable_string_disc) + source_project = info.get('ref_profile_prj') + config = build.get_project_config(source_project) + build_status = self.check_build_status(project) + + if not build_status: + print("Please Check Build status. !!") + return + + if build.exists(project): + self.remove_repos_path_project(build, project) + if not build_status: + print("Please Check build status. !!") + return + else: + try: + self.add_multi_link_project(build, project, source_project) + info["build_step"] = "profile_check" + build.set_description(json.dumps(info), project) + except ObsError, error: + raise LocalError("Can't add link project : %s" % (error)) + + else: + print("No search Project %s" % (project)) + + def remove_repos_path_project(self, build, project_name): + + path = core.quote_plus(project_name) + kind = 'prj' + data = core.meta_exists( + metatype=kind, path_args=path, template_args=None, create_new=False) + + if not data: + return + + root = ElementTree.fromstring(''.join(data)) + + for repo in root.iter('repository'): + for repo_path in repo.findall('path'): + repo.remove(repo_path) + + res_data = ElementTree.tostring(root) + build.set_meta(res_data, project_name) + + def create_project(self, build, project_name, info, base_ref_project, source_project=None): + + try: + if not build.exists(project_name): + try: + build.create_project( + project_name, src=source_project, description=json.dumps(info), linkto=base_ref_project, linkedbuild='all') + except ObsError, error: + raise LocalError( + "Unable to create project %s: %s" % (project_name, error)) + + else: + print "\nTarget project %s exist" % (project_name) + return False + except ObsError, error: + raise LocalError("Unable to create project %s: %s" % + (project_name, error)) + + def main(self, action=None): + """ + main + """ + obs_api = os.getenv("REF_TARGET_OBS_API_URL") + obs_user = os.getenv("REF_TARGET_OBS_API_USERNAME") + obs_passwd = os.getenv("REF_TARGET_OBS_API_PASSWD") + + remote_obs_api = os.getenv("REF_REMOTE_OBS_API_URL") + remote_obs_user = os.getenv("REF_REMOTE_OBS_API_USERNAME") + remote_obs_passwd = os.getenv("REF_REMOTE_OBS_API_PASSWD") + + info = trigger_info(os.getenv('TRIGGER_INFO')) + for loop in range(1): + if action == 'base_check' or action is None: + + for field in ('obs_target_prj', 'build_step', 'virtual_sr', 'ref_base_prj', 'ref_profile_prj', 'request_sr', 'base_target_prj'): + if field not in info: + print 'Error: TRIGGER_INFO doesn\'t contain %s' % field + return -1 + return self.run_create_staging_base_project_obs(obs_api, obs_user, obs_passwd, remote_obs_api, remote_obs_user, remote_obs_passwd) + elif action == 'profile_check': + return self.add_linked_project_profile(obs_api, obs_user, obs_passwd) + else: + print('not enable action = %s' % (action)) + + return True + + +if __name__ == '__main__': + trigger = CreateProjectObs() + sys.exit(trigger.main(sys.argv[1])) + diff --git a/job_base_verification_dispatcher.py 
b/job_base_verification_dispatcher.py new file mode 100644 index 0000000..df86f52 --- /dev/null +++ b/job_base_verification_dispatcher.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python +# +# Copyright (C) 2010, 2011, 2012, 2013, 2014 Intel, Inc. +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; version 2 +# of the License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# +"""This job require the Parameterized Build plugin, it's triggered +by http request get method to the job url, for example +http://my.host.com/job/JOB_NAME/buildWithParameters with data +""" + +import sys +import os + +from common.buildtrigger import trigger_next, trigger_info +from common.buildservice import BuildService +from common.backenddb import BackendDB +from common.snapshot import snapshot_project_enabled +from common.prerelease import is_prerelease_project, get_info_from_prerelease_name +from common.trbs import is_trbs_project +from common import buildmonitor_db +from datetime import datetime + + +def change_name_snapshot_to_ref_project(target_project, snapshot): + snapshot_version_num = snapshot.split('_')[1] + project_name = "%s:ref:%s" % (target_project, snapshot_version_num) + + return project_name + + +def change_format_trigger_info(info): + + ref_base_prj = change_name_snapshot_to_ref_project( + info.get('base_target_prj'), info.get('ref_base_prj')) + latest_base_prj = change_name_snapshot_to_ref_project( + info.get('base_target_prj'), info.get('latest_base_prj')) + ref_profile_prj = change_name_snapshot_to_ref_project( + info.get('obs_target_prj'), info.get('ref_profile_prj')) + + data = dict(info) + data.update({"ref_base_prj": ref_base_prj,"latest_base_prj": latest_base_prj,"ref_profile_prj": ref_profile_prj}) + + return data + + +def main(obs_event_fields): + """The main body""" + + basecheck_info = change_format_trigger_info(obs_event_fields) + trigger_next("BASECHECK", basecheck_info) + + +if __name__ == '__main__': + # Jenkins task dispatcher, receive events from OBS jenkins notify plugin + # and create files to trigger downstream jobs + if len(sys.argv) != 1: + raise SystemExit('This job do NOT support to carry parameters') + + sys.exit(main(trigger_info(os.getenv('TRIGGER_INFO')))) + diff --git a/job_buildmonitor.py b/job_buildmonitor.py index a1cd07d..fdabb2a 100644 --- a/job_buildmonitor.py +++ b/job_buildmonitor.py @@ -2186,6 +2186,40 @@ def sync_sr_submit_for_sync_sr_map(bm_sync_git_tag, bm_member_git_tag_list, sour query_data = (sync_sr_status_id, member_sr_status_id, build_snapshot_id) buildmonitor_db.do_query(query, query_data) +#================================================================================== +# Base_chacker +def base_checker_sr_submit_for_sync_sr_map(bm_sync_git_tag, bm_member_git_tag_list, source_snapshot_name=None): + print 'enter create_sr_submit_for_basechecker_sr_map\n' + + #sync_type + bm_sync_type = "B" + + # get curr_sr_status_id + query = "SELECT id FROM sr_status WHERE sr = %s" + query_data = (bm_sync_git_tag,) + sync_sr_status_id = 
buildmonitor_db.get_value_from_query_data(query, query_data)
+
+    query = "SELECT id FROM sr_status WHERE sr = %s"
+    query_data = (bm_member_git_tag_list,)
+    member_sr_status_id = buildmonitor_db.get_value_from_query_data(query, query_data)
+
+    #Fake snapshot name
+    query = "SELECT id FROM build_snapshot WHERE build_project_id = %s AND snapshot_name = %s"
+    query_data = ('0', source_snapshot_name,)
+    build_snapshot_id = buildmonitor_db.get_value_from_query_data(query, query_data)
+    if build_snapshot_id == INVALID_ID:
+        query = "INSERT INTO build_snapshot (build_project_id, snapshot_name) " \
+                "VALUES(%s, %s)"
+        query_data = ('0', source_snapshot_name)
+        buildmonitor_db.do_query(query, query_data)
+        query = "SELECT id FROM build_snapshot WHERE build_project_id = %s AND snapshot_name = %s"
+        query_data = ('0', source_snapshot_name,)
+        build_snapshot_id = buildmonitor_db.get_value_from_query_data(query, query_data)
+
+    query = "INSERT INTO sync_sr_map (sync_sr_status_id, member_sr_status_id, build_snapshot_id, sync_type) VALUES(%s, %s, %s, %s)"
+    query_data = (sync_sr_status_id, member_sr_status_id, build_snapshot_id, bm_sync_type)
+    buildmonitor_db.do_query(query, query_data)
+
 def sync_sr_submit_for_sr_status(bm_git_tag):
     print 'enter sync_sr_submit_for_sr_status\n'
 
@@ -2535,6 +2569,45 @@ def main():
         #sync_sr_update_comment(bm_sync_git_tag, submitter, bm_member_sr_info)
 
     #=======================================================
+    # Sync_SR_Submit_BaseCheck
+    elif bm_stage == 'Sync_SR_Submit_BaseCheck':
+        print '[Sync_SR_Submit_BaseCheck]\n'
+
+        # get vars
+        commit_date = transform_date(content.get("commit_date"))
+        commit_msg = truncate_msg(content.get("commit_msg"))
+        submit_date = transform_date(content.get("submit_date"))
+        submit_msg = truncate_msg(content.get("submit_msg"))
+        submitter = content.get("submitter")
+        gerrit_project = content.get("gerrit_project")
+        gerrit_newrev = content.get("gerrit_newrev")
+        gerrit_account_name = content.get("gerrit_account_name")
+        bm_start_datetime = content.get("bm_start_datetime")
+        bm_end_datetime = content.get("bm_end_datetime")
+        bm_src_project_lst = content.get("bm_src_project_lst")
+
+        bm_member_sr_info = content.get("bm_member_sr_info")
+        print 'bm_member_sr_info(%s)\n' % (bm_member_sr_info)
+        bm_sync_git_tag = content.get("bm_sync_git_tag")
+        bm_sync_git_tag_list = content.get("git_tag_list")
+        print 'bm_sync_git_tag(%s)\n' % (bm_sync_git_tag)
+
+        # func call
+        ### 1. bm_sync_git_tag (sr_status + sr_commit + sr_status_detail + sr_stage)
+        sync_sr_submit_for_sr_status(bm_sync_git_tag)
+        sync_sr_submit_for_sr_commit(commit_date, commit_msg, submit_date, submit_msg,
+                                     submitter, bm_sync_git_tag, gerrit_project,
+                                     gerrit_newrev, gerrit_account_name)
+        sync_package_build_for_sr_detail_sr_stage(bm_sync_git_tag, bm_start_datetime,
+                                                  bm_end_datetime,
+                                                  bm_src_project_lst)
+
+
+        # member sr & sync_sr for [sync_sr_map] table
+        for git in bm_sync_git_tag_list:
+            base_checker_sr_submit_for_sync_sr_map(bm_sync_git_tag, git)
+
+    #=======================================================
     # [PRE] Build
     #=======================================================
     # [PRE] Snap
diff --git a/job_jobs_dispatcher.py b/job_jobs_dispatcher.py
index 6534e8c..566efb0 100755
--- a/job_jobs_dispatcher.py
+++ b/job_jobs_dispatcher.py
@@ -44,6 +44,14 @@ def isReadyForTrbs(build, project):
     """
     return True
 
+def isBaseCheckPrerelease(project):
+    """
+    Check whether the project is a basechecker prerelease project.
+    """
+    if ':basechecker:' in project:
+        return True
+    return False
+
 def isReadyForCreateSnapshot(build, backenddb, project):
 
     # whether project is not enabled
@@ -110,7 +118,15 @@ def main(obs_event_fields):
         redis_port = int(os.getenv("REDIS_PORT"))
         backenddb = BackendDB(redis_host, redis_port)
         if(is_prerelease_project(project)):
-            if(isReadyForPrerelease(build, project)):
+            if(isBaseCheckPrerelease(project)):
+                info = build.get_info(project)
+                if info.get('build_step') == "profile_check":
+                    print 'All the repositories are published for project %s.' \
+                          'Triggering the prerelease.' % (project)
+                    trigger_next('#PRERELEASE#%s#%s' % (project, event_type), obs_event_fields)
+                elif info.get('build_step') == "base_check":
+                    trigger_next('#BASE-CHECK#%s#%s' % (project, event_type), obs_event_fields)
+            elif(isReadyForPrerelease(build, project)):
                 print 'All the repositories are published for project %s.' \
                       'Triggering the prerelease.' % (project)
                 target_project_name = get_info_from_prerelease_name(project)[0]
diff --git a/packaging/jenkins-scripts.spec b/packaging/jenkins-scripts.spec
index fb036c4..da6e5a0 100755
--- a/packaging/jenkins-scripts.spec
+++ b/packaging/jenkins-scripts.spec
@@ -209,6 +209,8 @@ fi
 %{destdir}/job_gbsdbbuild_one_repoarch_build.py
 %{destdir}/job_gbsdbbuild_update_meta.py
 %{destdir}/job_git_sync_downstream.py
+%{destdir}/job_base_verification.py
+%{destdir}/job_base_verification_dispatcher.py
 
 %files common
 %defattr(-,jenkins,jenkins)
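
job_base_verification.py builds the prerelease basechecker project from the base and profile reference projects and then patches the project configuration so that built packages carry the submission tag in their release number. The prjconf rewrite inside change_project_config() reduces to the regular-expression splice sketched below; the sample prjconf text and git tag are invented for illustration only (the real values come from build.get_project_config() and the virtual SR name), and the PRERELEASE_RPMBUILD_STAGE handling is left out.

# Minimal sketch of the 'Release:' rewrite performed by change_project_config().
import re

def insert_release_name(config, git_tag):
    """Prefix the existing 'Release:' value with the submission tag."""
    release_name = 'Release: %s' % git_tag.split(':')[-1]
    res = re.findall(r'^Release: ?\S+$', config, flags=re.MULTILINE)
    if res:
        if git_tag.split(':')[-1] not in res[0]:
            note = '#Insert time from submission into release name\n'
            release_name = '%s.%s' % (release_name,
                                      res[0].split('Release:')[1].strip())
            config = config.replace(res[0], '%s%s' % (note, release_name), 1)
    else:
        note = '#Add release name into prjconf\n'
        config = note + '%s\n' % release_name + config
    # prerelease projects always copy linked packages
    if not re.search("CopyLinkedPackages:", config):
        config = config + "\nCopyLinkedPackages: yes\n"
    return config

if __name__ == '__main__':
    sample_config = 'Release: <CI_CNT>.<B_CNT>\nPrefer: gcc\n'    # hypothetical prjconf
    sample_tag = 'submit:tizen_base:20190918.040404'              # hypothetical virtual SR tag
    print(insert_release_name(sample_config, sample_tag))
    # -> Release: 20190918.040404.<CI_CNT>.<B_CNT>, plus CopyLinkedPackages: yes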
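
create_basechecker_project() copies the profile project's _meta, rebases it onto the base reference project, and strips the inherited maintainers and link elements before writing it back with build.set_meta(). The sketch below reproduces that XML surgery on a hypothetical _meta document; the project names and maintainer id are placeholders, not values taken from the patch.

# Sketch of the _meta rewrite done by create_basechecker_project().
import xml.etree.ElementTree as ElementTree

SAMPLE_META = """
<project name="Tizen:Unified">
  <title/>
  <description/>
  <person userid="someuser" role="maintainer"/>
  <repository name="standard">
    <path project="Tizen:Base" repository="standard"/>
    <arch>x86_64</arch>
  </repository>
</project>
"""

def rewrite_meta(meta, target, baserefproject, linked='all', maintainer='obs_user'):
    root = ElementTree.fromstring(meta)
    root.set('name', target)                       # point the meta at the new project
    for person in root.findall('person'):          # drop inherited maintainers
        root.remove(person)
    for link in root.findall('link'):              # drop inherited <link> projects
        root.remove(link)
    for repo in root.findall('repository'):        # rebuild against the base ref project
        repo.set('linkedbuild', linked)
        for path in repo.findall('path'):
            path.set('project', baserefproject)
    root.append(ElementTree.Element('link', {'project': baserefproject}))
    root.append(ElementTree.Element('person',
                                    {'userid': maintainer, 'role': 'maintainer'}))
    return ElementTree.tostring(root)

if __name__ == '__main__':
    print(rewrite_meta(SAMPLE_META,
                       'home:prerelease:Tizen:Unified:basechecker:20190918.1',   # hypothetical
                       'Tizen:Base:ref:20190918.1'))                             # hypothetical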
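
job_base_verification_dispatcher.py only reshapes the trigger data: the snapshot names carried in TRIGGER_INFO are rewritten into ':ref:' project names before the BASECHECK job is triggered. For example, using the helper exactly as defined in the new file (the snapshot and project names are hypothetical):

def change_name_snapshot_to_ref_project(target_project, snapshot):
    snapshot_version_num = snapshot.split('_')[1]
    return "%s:ref:%s" % (target_project, snapshot_version_num)

print(change_name_snapshot_to_ref_project('Tizen:Base', 'tizen-base_20190918.1'))
# -> Tizen:Base:ref:20190918.1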
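
base_checker_sr_submit_for_sync_sr_map() records each member SR against the sync SR with sync_type "B" and, before inserting into sync_sr_map, looks up or creates the placeholder build_snapshot row (the "#Fake snapshot name" path, keyed on build_project_id 0). Its select-insert-select pattern is sketched below with sqlite3 purely as a stand-in so the example is runnable; the real code goes through common.buildmonitor_db against the build-monitor database.

# Illustration of the look-up-or-create pattern used for the fake snapshot row.
import sqlite3

def get_or_create_snapshot_id(cur, snapshot_name):
    cur.execute("SELECT id FROM build_snapshot WHERE build_project_id = ? "
                "AND snapshot_name = ?", ('0', snapshot_name))
    row = cur.fetchone()
    if row is None:                                  # same role as the INVALID_ID check
        cur.execute("INSERT INTO build_snapshot (build_project_id, snapshot_name) "
                    "VALUES(?, ?)", ('0', snapshot_name))
        cur.execute("SELECT id FROM build_snapshot WHERE build_project_id = ? "
                    "AND snapshot_name = ?", ('0', snapshot_name))
        row = cur.fetchone()
    return row[0]

if __name__ == '__main__':
    conn = sqlite3.connect(':memory:')
    cur = conn.cursor()
    cur.execute("CREATE TABLE build_snapshot (id INTEGER PRIMARY KEY, "
                "build_project_id TEXT, snapshot_name TEXT)")
    print(get_or_create_snapshot_id(cur, 'fake-basecheck-snapshot'))   # -> 1 (created)
    print(get_or_create_snapshot_id(cur, 'fake-basecheck-snapshot'))   # -> 1 (reused)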
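
In job_jobs_dispatcher.py, publish events for ':basechecker:' projects are routed by the 'build_step' value that job_base_verification.py keeps in the project description: while build_step is 'base_check' the event goes to the base-check job, and once add_linked_project_profile() has linked the profile project and switched build_step to 'profile_check', the event is handed over to the normal prerelease flow. A hypothetical stand-alone version of that branch (the project name and event type below are invented for illustration):

def route_basechecker_event(project, build_step, event_type):
    if ':basechecker:' not in project:
        return None
    if build_step == 'base_check':
        return '#BASE-CHECK#%s#%s' % (project, event_type)
    if build_step == 'profile_check':
        return '#PRERELEASE#%s#%s' % (project, event_type)
    return None

print(route_basechecker_event(
    'home:prerelease:Tizen:Unified:basechecker:20190918.1',   # hypothetical project
    'base_check', 'OBS_REPO_PUBLISHED'))                      # hypothetical event type
# -> #BASE-CHECK#home:prerelease:Tizen:Unified:basechecker:20190918.1#OBS_REPO_PUBLISHED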