1. Deploy 'Daily GBS Build' 92/172092/1
author SoonKyu Park <sk7.park@samsung.com>
Fri, 9 Mar 2018 23:45:13 +0000 (08:45 +0900)
committer SoonKyu Park <sk7.park@samsung.com>
Fri, 9 Mar 2018 23:45:13 +0000 (08:45 +0900)
2. Handle '_aggregate' package
3. Change db-query handling method to speed-up 'gbs_build_package' db query

Change-Id: I179a16e41f431350fa60faa0dc12a7de8d0fb8ed

common/gbsutils.py
job_buildmonitor.py
job_gbs_build_dispatcher.py
job_gbs_dashboard_build.py
job_gbsdbbuild_create_snapshot.py
job_gbsfullbuild_buildlogs.py
job_gbsfullbuild_image_creator.py

index d6d8b86..30ae964 100644 (file)
@@ -159,6 +159,23 @@ class GBSBuild:
                 path_prj_arch = line.split('"')[1]+'##'+line.split('"')[3]
                 path_prj_list[repo_tmp].append(path_prj_arch)
 
+        #handle for _aggregate file
+        aggregate_list = []
+        for root, dirs, files in os.walk(prjdir+'/'+obs_prj):
+            for each_file in files:
+                if each_file == "_aggregate":
+                    aggregate_file=os.path.join(root,each_file)
+                    lines = open(aggregate_file).readlines()
+                    for line in lines:
+                        if line.find('<binary>') != -1:
+                            binary_rpm = line.split('<binary>')[1].split('</binary>')[0]
+                            aggregate_list.append({'binary_rpm':binary_rpm})
+                            aggregate_list[-1]['repository_source_target_list'] = []
+                        if line.find('<repository source') != -1:
+                            src_repo = line.split('<repository source="')[1].split('"')[0]
+                            target_repo = line.split('target="')[1].split('"')[0]
+                            aggregate_list[-1]['repository_source_target_list'].append(src_repo+'##'+target_repo)
+
         prj_src_root = os.path.join(gbsbuild_workspace,'SRC-ROOT',obs_prj)
         build_root = os.path.join(gbsbuild_workspace,'GBS-ROOT')
         live_root = os.path.join(gbsbuild_workspace,'live')
@@ -207,15 +224,17 @@ class GBSBuild:
         self.rm_pkg_list_array = rm_pkg_list_array
         self.gbsbuild_tag = gbsbuild_tag
         self.trigger_category = trigger_category
+        self.aggregate_list = aggregate_list
 
         print 'profile: %s,obs_prj: %s,t_ver: %s,t_ver_path: %s,t_branch: %s,\
                basic_snapshot_url: %s,snapshot_num: %s,snapshot_url: %s,snapshot_build_id: %s,\
                repository: %s,arch_list: %s, path_prj_list: %s,prj_src_root: %s\
                build_root: %s, basic_url: %s, is_fullbuild: %s, build_pkg_list: %s\
-               trigger_category: %s, gbsbuild_workspace: %s'\
+               trigger_category: %s, gbsbuild_workspace: %s, aggregate_list: %s'\
                 %(profile,obs_prj,t_ver,t_ver_path,t_branch,basic_snapshot_url,snapshot_num,\
                  snapshot_url,snapshot_build_id,repository,arch_list,path_prj_list,prj_src_root,\
-                 build_root,basic_url,is_fullbuild,build_pkg_list,trigger_category,gbsbuild_workspace) 
+                 build_root,basic_url,is_fullbuild,build_pkg_list,trigger_category,\
+                 gbsbuild_workspace,aggregate_list) 
 
     def convert_gbsbuild_to_dictionary(self):
         dic = {}
@@ -242,6 +261,7 @@ class GBSBuild:
         dic['rm_pkg_list_array'] = self.rm_pkg_list_array
         dic['gbsbuild_tag'] = self.gbsbuild_tag
         dic['trigger_category'] = self.trigger_category
+        dic['aggregate_list'] = self.aggregate_list
 
         return dic
 
@@ -633,11 +653,11 @@ class GBSBuild:
             if not os.path.exists(obs_liverepo):
                 os.mkdir(obs_liverepo)
 
-        obs_liverepo=os.path.join(obs_liverepo,self.gbsbuild_tag.split('/')[1])
+        obs_liverepo=os.path.join(obs_liverepo,self.gbsbuild_tag.split('/')[-1])
         if not os.path.exists(obs_liverepo):
             os.mkdir(obs_liverepo)
 
-        sync_dest=os.path.join(sync_dest,self.gbsbuild_tag.split('/')[1])
+        sync_dest=os.path.join(sync_dest,self.gbsbuild_tag.split('/')[-1])
 
         buildlogs_root = os.path.join(obs_liverepo,'buildlogs')
         os.mkdir(buildlogs_root)
@@ -660,6 +680,19 @@ class GBSBuild:
                     print 'copy destination dir: %s' %(live_repo_root+'/'+arch+'/')
                     for file_name in glob.glob(build_result_repo_path+'/'+gbs_build_arch+'/RPMS/*.'+arch+'.rpm'):
                         shutil.copy(file_name, live_repo_root+'/'+arch+'/')
+                    #Handle for files which are specified in _aggregate
+                    print "Handle aggregate list: %s" %self.aggregate_list
+                    for aggregate_rpm in self.aggregate_list:
+                        for repository_source_target in aggregate_rpm['repository_source_target_list']:
+                            source_repo = repository_source_target.split('##')[0]
+                            target_repo = repository_source_target.split('##')[1]
+                            if source_repo == repository:
+                                if not os.path.exists(obs_liverepo+'/'+target_repo):
+                                    os.mkdir(obs_liverepo+'/'+target_repo)
+                                if not os.path.exists(obs_liverepo+'/'+target_repo+'/'+arch):
+                                    os.mkdir(obs_liverepo+'/'+target_repo+'/'+arch)
+                                for file_name in glob.glob(build_result_repo_path+'/'+gbs_build_arch+'/RPMS/'+aggregate_rpm['binary_rpm']+'*.'+arch+'.rpm'):
+                                    shutil.copy(file_name, obs_liverepo+'/'+target_repo+'/'+arch+'/')
                 #Remove folder if there is no file in arch directory
                 if len(os.walk(live_repo_root+'/'+arch).next()[2]) == 0:
                     os.rmdir(live_repo_root+'/'+arch)
index b77cde7..2e4ba37 100644 (file)
@@ -1595,19 +1595,19 @@ def update_gbs_build_package(content):
     print "gbs_build_target_id: %s" %gbs_build_target_id
 
     query_data=()
-    query = ""
+    query = "SELECT gc.id,gc.commit_id,gr.name FROM git_commit gc, git_repository gr WHERE "
     for each_pkg_git_repo,each_pkg_commit_id in zip(bm_pkg_git_repo_list,bm_pkg_git_commit_id_list):
-        query += "(SELECT gc.id,gc.commit_id,gr.name FROM git_commit gc, git_repository gr " \
-                "WHERE gr.name = %s and gc.commit_id = %s) union "
+        query += "gr.name = %s and gc.commit_id = %s or "
         query_data+=(each_pkg_git_repo,each_pkg_commit_id,)
-    query = query.rstrip("union ")
+    query = query.rstrip("or ")
 
-    git_id_dic={}
     query_result = buildmonitor_db.get_multi_values_from_query_data(query, query_data)
-    for git_id,git_commit_id,git_repository in query_result:
+
+    git_id_dic = {}
+    for each_result in query_result:
         print "git repo db: git_id: %s git_commit_id: %s git_repository: %s"\
-               %(git_id,git_commit_id,git_repository)
-        git_id_dic[git_repository]=git_id
+               %(each_result[0],each_result[1],each_result[2])
+        git_id_dic[each_result[2]] = each_result[0]
 
     # bulk insert
     query = "INSERT INTO gbs_build_package (gbs_build_target_id, git_id, " \
index 238c5de..363c452 100755 (executable)
@@ -2,9 +2,11 @@
 
 import sys
 import os
+import urllib2
 
 from common.buildtrigger import trigger_info, trigger_next, get_pending_builds_with_parameter, cancel_pending_build_with_id, get_running_builds_with_parameter, cancel_running_build_with_id
 from common.gbsutils import gbs_update_dashboard
+from datetime import datetime;
 
 def menu_options(menu):
     return ["", "Deps/Full Build", "Remove Packages", "New project gbs fullbuild", "Link Project Upgrade"].index(menu)
@@ -83,9 +85,64 @@ def release_snapshot(contents):
     print bypass_data
     gbs_update_dashboard(bypass_data, via='direct_call')
 
+def get_snapshot_id(obs_prj):
+    #psk temporary for building test -- TODO: restore the datetime line and remove the hardcoded date below before release
+    #curr_date=datetime.today().strftime("%Y%m%d")
+    curr_date='20180305'
+    basic_url= os.getenv('URL_PUBLIC_REPO_BASE') + '/snapshots/tizen/'
+    profile = obs_prj.replace("Tizen:","").replace(":","-").lower()
+    profile_url = basic_url+profile+'/'
+    snapshot_id_prefix = "tizen-"+profile
+
+    profile_url_content=urllib2.urlopen(profile_url).read()
+
+    if profile_url_content.find(curr_date) == -1:
+        return (None,None)
+
+    if os.getenv("USE_WHICH_SNAPSHOT") == "first":
+        snapshot_num_used = curr_date+'.1'
+    else:
+        last_snapshot_num='1'
+        for line in profile_url_content.split('\n'):
+            if line.find(curr_date) != -1:
+                snapshot_num=line.split('<a href="'+snapshot_id_prefix+'_')[1].split('/"')[0].split('.')[1]
+                if int(snapshot_num) > int(last_snapshot_num):
+                    last_snapshot_num = snapshot_num
+        snapshot_num_used = curr_date+"."+last_snapshot_num
+
+    snapshot_id=snapshot_id_prefix+"_"+snapshot_num_used
+    tag_name="RELEASE/"+snapshot_id_prefix.upper()+"/"+snapshot_num_used
+
+    return (snapshot_id,tag_name)
+
+def release_build():
+    print "Start release build (periodic build)"
+
+    for obs_prj in os.getenv("DAILY_BUILD_OBS_PROJECTS").split(' '):
+        (snapshot,tag) = get_snapshot_id(obs_prj)
+        print "snapshot:%s tag:%s" %(snapshot,tag)
+        if not snapshot:
+            print "Skip release build for %s because there is no snapshot created today" %obs_prj
+            continue
+
+        bypass_data = {
+            "gbs_type"       : "Release Build",
+            "tag"            : tag,
+            "source_snapshot": snapshot,
+            "main_project"   : obs_prj,
+            "mode"           : "queued",
+            }
+        gbs_update_dashboard(bypass_data, via='direct_call')
+        trigger_next("gbs-dashboard-build_%s" % tag.replace('/', '_'), bypass_data)
+
 def main():
     """The main body"""
 
+    # If it is not triggered by trigger_next, consider this build as a release build
+    if os.getenv("TRIGGER_INFO") == None:
+        release_build()
+        return
+
     contents = trigger_info(os.getenv("TRIGGER_INFO"))
 
     if contents.get("menu", None) == "Abort":
index 044bb46..aa7644b 100644 (file)
@@ -54,16 +54,18 @@ def main():
         trigger_category = contents.get("gbs_type")
     if not new_pkg_list:
         new_pkg_list = ''
-        for pkg in contents.get("packages"):
-            if pkg.values()[0]['build'] == 'include':
-                new_pkg_list += (pkg.keys()[0]+' '+pkg.values()[0]['commit']+' ')
-                new_pkg_list.rstrip(' ')
+        if contents.get("packages"):
+            for pkg in contents.get("packages"):
+                if pkg.values()[0]['build'] == 'include':
+                    new_pkg_list += (pkg.keys()[0]+' '+pkg.values()[0]['commit']+' ')
+                    new_pkg_list.rstrip(' ')
     if not rm_pkg_list:
         rm_pkg_list = ''
-        for pkg in contents.get("packages"):
-            if pkg.values()[0]['build'] != 'include':
-                rm_pkg_list += (pkg.keys()[0]+' ')
-                rm_pkg_list.rstrip(' ')
+        if contents.get("packages"):
+            for pkg in contents.get("packages"):
+                if pkg.values()[0]['build'] != 'include':
+                    rm_pkg_list += (pkg.keys()[0]+' ')
+                    rm_pkg_list.rstrip(' ')
 
     gbsbuild_tag = contents.get("tag")
 
index 045b63b..d177f7d 100755 (executable)
@@ -26,6 +26,7 @@ import sys
 import re
 import datetime
 import ast
+import subprocess
 
 from common.repomaker import find_files, RepoMaker, RepoMakerError
 from common.buildtrigger import trigger_info, trigger_next
@@ -49,7 +50,7 @@ def bm_update_gbsdbbuild_snapshot_failed(status_reason,gbsbuild_tag):
               }
     trigger_next("BUILD-MONITOR-4-%s" %bm_stage, bm_data)
 
-def replace_ks_file_for_gbsdbbuild(kickstart,build_id):
+def replace_ks_file_for_gbsdbbuild(kickstart,build_id,gbsbuild_trigger_category):
     gbsdbbuild_dl_postfix = os.getenv('GBSDBBUILD_DL_POSTFIX')
     replaced_ks=''
 
@@ -65,15 +66,21 @@ def replace_ks_file_for_gbsdbbuild(kickstart,build_id):
             else:
                 repo_name=line.split('--name=')[1].split(' ')[0]
                 orig_line=line.replace('/@BUILD_ID@/repos/','/'+snapshot_build_id+'/repos/')+' --priority=99\n'
-                add_line=line.replace(repo_name,'gbs_repo').replace('/snapshots/tizen/','/'+gbsdbbuild_dl_postfix+'/tizen/').replace('/@BUILD_ID@/repos/','/'+snapshot_build_id+'/'+build_id+'/repos/')+' --priority=1'
-                line=orig_line + add_line
+                if gbsbuild_trigger_category.lower() == 'release build':
+                    add_line=line.replace(repo_name,'gbs_repo').replace('/snapshots/tizen/','/'+gbsdbbuild_dl_postfix+'/releases/tizen/').replace('/@BUILD_ID@/repos/','/'+build_id+'/repos/')+' --priority=1'
+                else:
+                    add_line=line.replace(repo_name,'gbs_repo').replace('/snapshots/tizen/','/'+gbsdbbuild_dl_postfix+'/tizen/').replace('/@BUILD_ID@/repos/','/'+snapshot_build_id+'/'+build_id+'/repos/')+' --priority=1'
+                if gbsbuild_trigger_category.lower() == 'dependency build':
+                    line=orig_line + add_line
+                else:
+                    line=add_line
         replaced_ks+=line+'\n'
     
     return replaced_ks
 
 
 def prepare_trigger_data(images, build_id, path_repo, project,
-                           url_pub_base,gbsbuild_tag):
+                           url_pub_base,gbsbuild_tag,gbsbuild_trigger_category):
     """
     prepare_trigger_data:
         Prepare the trigger data
@@ -98,7 +105,7 @@ def prepare_trigger_data(images, build_id, path_repo, project,
             #end of temporary
             name = ksname.replace('.ks', '')
 
-            kickstart = replace_ks_file_for_gbsdbbuild(kickstart,build_id)
+            kickstart = replace_ks_file_for_gbsdbbuild(kickstart,build_id,gbsbuild_trigger_category)
 
             if download_host:
                 # replace host name
@@ -188,10 +195,16 @@ def make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic):
 #        local_targets  = snapshot.targets
 #        local_path     = snapshot.path
 #        local_dir      = snapshot.dir
-        local_build_id = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['gbsbuild_tag'].split('/')[1]
-        local_targets  = create_local_targets(gbsbuild_dic)
-        local_dir = os.path.join(os.getenv('GBSDBBUILD_DL_POSTFIX'),'tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'])
-        local_path = os.path.join(base_path,local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id)
+        if gbsbuild_dic['gbsbuild_tag'].startswith("RELEASE"):
+            local_build_id = gbsbuild_dic['snapshot_build_id']
+            local_targets  = create_local_targets(gbsbuild_dic)
+            local_dir = os.path.join(os.getenv('GBSDBBUILD_DL_POSTFIX'),'releases/tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'])
+            local_path = os.path.join(base_path,local_dir,local_build_id)
+        else:
+            local_build_id = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['gbsbuild_tag'].split('/')[1]
+            local_targets  = create_local_targets(gbsbuild_dic)
+            local_dir = os.path.join(os.getenv('GBSDBBUILD_DL_POSTFIX'),'tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'])
+            local_path = os.path.join(base_path,local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id)
 
         # Delete Snapshot() instance.
 #        del snapshot
@@ -207,7 +220,7 @@ def make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic):
 
     targets = local_targets
     live_repo_path = os.path.join(live_repo_base,
-                                  project.replace(':', ':/'),gbsbuild_dic['gbsbuild_tag'].split('/')[1])
+                                  project.replace(':', ':/'),gbsbuild_dic['gbsbuild_tag'].split('/')[-1])
     parent_snapshot_path = os.path.join(base_path,'snapshots/tizen',
                                         gbsbuild_dic['t_ver']+gbsbuild_dic['profile'],
                                         gbsbuild_dic['snapshot_build_id'])
@@ -276,9 +289,13 @@ def make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic):
                                     os.getenv('GERRIT_REVIEW_URL'),
                                     live_repo_path, repo['Architectures'])
 
+    if gbsbuild_dic['trigger_category'].lower() == 'release build':
+        repo_path = os.path.join(local_dir,local_build_id)
+    else:
+        repo_path = os.path.join(local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id)
     return {'project': project,
             'repo': repos,
-            'repo_path': os.path.join(local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id),
+            'repo_path': repo_path,
             'build_id': local_build_id,
             'imagedata': imagedatas
         }
@@ -303,10 +320,16 @@ def main():
     snapshot_name = gbsbuild_dic['snapshot_build_id']
     project = gbsbuild_dic['obs_prj']
 
-    snapshot_name = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['gbsbuild_tag'].split('/')[1]
-    snapshot_url = os.path.join(os.getenv('URL_PUBLIC_REPO_BASE'),os.getenv('GBSDBBUILD_DL_POSTFIX'),\
-                       'tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'],\
-                       gbsbuild_dic['snapshot_build_id'],snapshot_name)
+    if gbsbuild_dic['gbsbuild_tag'].startswith("RELEASE"):
+        snapshot_name = gbsbuild_dic['snapshot_build_id']
+        snapshot_url = os.path.join(os.getenv('URL_PUBLIC_REPO_BASE'),os.getenv('GBSDBBUILD_DL_POSTFIX'),\
+                           'releases/tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'],\
+                           snapshot_name)
+    else:
+        snapshot_name = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['gbsbuild_tag'].split('/')[1]
+        snapshot_url = os.path.join(os.getenv('URL_PUBLIC_REPO_BASE'),os.getenv('GBSDBBUILD_DL_POSTFIX'),\
+                           'tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'],\
+                           gbsbuild_dic['snapshot_build_id'],snapshot_name)
     bm_start_datetime = datetime.datetime.now()
 
 ##    gbs_remote_jenkins_build_job({'bm_stage':'GBSDBBuild_Snapshot_Start',
@@ -329,7 +352,7 @@ def main():
     print "build_id_liverepo = %s" %build_id_liverepo
     cmd = 'rm -rf `find '+build_id_liverepo+\
           ' ! -name '+os.path.basename(build_id_liverepo)+\
-          ' | grep -v '+gbsbuild_dic['gbsbuild_tag'].split('/')[1]+'`'
+          ' | grep -v '+gbsbuild_dic['gbsbuild_tag'].split('/')[-1]+'`'
     print "clear live root command: %s" %cmd
     subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
 
@@ -355,7 +378,7 @@ def main():
                                         repo_data['imagedata'],
                                         repo_data['build_id'],
                                         repo_data['repo_path'],
-                                        project, base_url, gbsbuild_dic['gbsbuild_tag'])
+                                        project, base_url, gbsbuild_dic['gbsbuild_tag'], gbsbuild_dic['trigger_category'])
 
     # trigger image creation jobs
     trigger_image_creation(trigger_data)
index c8a7aec..fcb9fd4 100755 (executable)
@@ -44,7 +44,7 @@ def main():
     if gbsbuild_dic:
         live_repo_base = os.path.join(os.getenv('PATH_LIVE_REPO_BASE'),os.getenv('GBSDBBUILD_DL_POSTFIX'))
         sync_src = os.path.join(live_repo_base,obs_prj.replace(':',':/'),
-                       gbsbuild_dic['gbsbuild_tag'].split('/')[1],'buildlogs')
+                       gbsbuild_dic['gbsbuild_tag'].split('/')[-1],'buildlogs')
     else:
         live_repo_base = os.path.join(os.getenv('PATH_LIVE_REPO_BASE'),os.getenv('GBSFULLBUILD_DL_POSTFIX'))
         sync_src = os.path.join(live_repo_base,obs_prj.replace(':',':/'),'buildlogs')
index d45a298..a7b30c5 100755 (executable)
@@ -379,7 +379,7 @@ def main():
         mode = "image_finished"
         gbs_update_data = {"tag" : fields['gbsbuild_tag'],
                            "mode" : mode,
-                           "reason" : {"ks_name":fields['name'],"repo":fields['repo'],"status":gbs_img_status}}
+                           "reason" : {"ks_name":fields['name'],"status":gbs_img_status}}
         gbs_update_dashboard(gbs_update_data,via='trigger',trigger_name=mode)