DB work for GBS Dashboard build 38/170038/2
authorSoonKyu Park <sk7.park@samsung.com>
Tue, 13 Feb 2018 05:03:36 +0000 (14:03 +0900)
committerSoonKyu Park <sk7.park@samsung.com>
Tue, 13 Feb 2018 05:17:10 +0000 (14:17 +0900)
Change-Id: I13f18524cf93cd516bdb237a4b91cd1795a68b85

common/gbsutils.py
job_buildmonitor.py
job_gbs_dashboard_build.py
job_gbsdbbuild_create_snapshot.py
job_gbsdbbuild_one_repoarch_build.py [new file with mode: 0644]
job_gbsfullbuild_buildlogs.py
job_gbsfullbuild_image_creator.py
packaging/jenkins-scripts.spec

index 3362247..b494cad 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/python
 #
 # Copyright (C) 2010, 2011, 2012, 2013, 2014 Intel, Inc.
 #
@@ -15,6 +14,7 @@
 #    along with this program; if not, write to the Free Software
 #    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
 #
+#!/usr/bin/python
 
 import sys
 import os
@@ -25,8 +25,12 @@ import tempfile
 import atexit
 import urllib2
 import glob
+import base64
+import json
 from common.git import Git, clone_gitproject
 from common.utils import sync
+from common.buildtrigger import remote_jenkins_build_job
+from urllib import quote_plus
 
 class RuntimeException(Exception):
     """Local error handler"""
@@ -50,11 +54,35 @@ def gbs_conf_prefix(t_ver,profile,build_repository):
 
     return gbs_conf_text
 
+def gbs_remote_jenkins_build_job(data):
+    encoded_data = base64.b64encode(json.dumps(data))
+    remote_jenkins_build_job(os.getenv('PUBLIC_JENKINS_URL'), \
+                             os.getenv('PUBLIC_JENKINS_USER'), \
+                             os.getenv('PUBLIC_JENKINS_PW'), \
+                             jobname='BUILD-MONITOR', \
+                             data='TRIGGER_INFO=%s' % (quote_plus(encoded_data)))
+
+
+def find_info_from_one_buildlog(buildlog_file,spec_file_name):
+    cmd = 'tail -100 '+buildlog_file
+    buildlog_tail=subprocess.check_output(cmd,shell=True)
+
+    buildtime = ''
+    built_rpm_files = []
+    for each_line in buildlog_tail.split('\n'):
+        if each_line.find('Wrote: /home/abuild/rpmbuild/RPMS/') != -1 and each_line.find('-debuginfo-') == -1 and each_line.find('-debugsource-') == -1:
+            rpm_file_name=each_line.split('/')[-1]
+            built_rpm_files.append(rpm_file_name)
+        if each_line.find('finished "build '+spec_file_name+'.spec') != -1:
+            buildtime = each_line.split('[')[1].split('s]')[0].replace(' ','')
+
+    print "spec_file_name: %s buildtime: %s built_rpm_files: %s" %(spec_file_name,buildtime,built_rpm_files)
+    return (buildtime,built_rpm_files)
 
 class GBSBuild:
     """A class which supports with statement"""
 
-    def __init__(self, gbsdbbuild_project_id, obs_prj, basic_url, snapshot_num, full_dep_sel, new_pkg_list, rm_pkg_list, gbsbuild_workspace):
+    def __init__(self, dashboard_gbsbuild_num, obs_prj, basic_url, snapshot_num, trigger_category, new_pkg_list, rm_pkg_list, gbsbuild_workspace):
         """ Initialize GBSBuild class"""
         print "-----------------------------------------------------"
         print "Initialize GBS Build Class"
@@ -128,10 +156,10 @@ class GBSBuild:
         build_root = os.path.join(gbsbuild_workspace,'GBS-ROOT')
         live_root = os.path.join(gbsbuild_workspace,'live')
 
-        if full_dep_sel == 'full build':
-            is_fullbuild = 'true'
-        else:
+        if trigger_category == 'dependency build':
             is_fullbuild = 'false'
+        else:
+            is_fullbuild = 'true'
 
         build_pkg_list = {}
         if new_pkg_list:
@@ -162,6 +190,7 @@ class GBSBuild:
         self.repository = repository
         self.arch_list = arch_list
         self.path_prj_list = path_prj_list
+        self.gbsbuild_workspace = gbsbuild_workspace
         self.prj_src_root = prj_src_root
         self.build_root = build_root
         self.live_root = live_root
@@ -169,15 +198,17 @@ class GBSBuild:
         self.is_fullbuild = is_fullbuild
         self.build_pkg_list = build_pkg_list
         self.rm_pkg_list_array = rm_pkg_list_array
-        self.gbsdbbuild_project_id = gbsdbbuild_project_id
+        self.dashboard_gbsbuild_num = dashboard_gbsbuild_num
+        self.trigger_category = trigger_category
 
         print 'profile: %s,obs_prj: %s,t_ver: %s,t_ver_path: %s,t_branch: %s,\
                basic_snapshot_url: %s,snapshot_num: %s,snapshot_url: %s,snapshot_build_id: %s,\
                repository: %s,arch_list: %s, path_prj_list: %s,prj_src_root: %s\
-               build_root: %s, basic_url: %s, is_fullbuild: %s, build_pkg_list: %s'\
+               build_root: %s, basic_url: %s, is_fullbuild: %s, build_pkg_list: %s\
+               trigger_category: %s, gbsbuild_workspace: %s'\
                 %(profile,obs_prj,t_ver,t_ver_path,t_branch,basic_snapshot_url,snapshot_num,\
                  snapshot_url,snapshot_build_id,repository,arch_list,path_prj_list,prj_src_root,\
-                 build_root,basic_url,is_fullbuild,build_pkg_list) 
+                 build_root,basic_url,is_fullbuild,build_pkg_list,trigger_category,gbsbuild_workspace)
 
     def convert_gbsbuild_to_dictionary(self):
         dic = {}
@@ -194,6 +225,7 @@ class GBSBuild:
         dic['repository'] = self.repository
         dic['arch_list'] = self.arch_list
         dic['path_prj_list'] = self.path_prj_list
+        dic['gbsbuild_workspace'] = self.gbsbuild_workspace
         dic['prj_src_root'] = self.prj_src_root
         dic['build_root'] = self.build_root
         dic['live_root'] = self.live_root
@@ -201,10 +233,38 @@ class GBSBuild:
         dic['is_fullbuild'] = self.is_fullbuild
         dic['build_pkg_list'] = self.build_pkg_list
         dic['rm_pkg_list_array'] = self.rm_pkg_list_array
-        dic['gbsdbbuild_project_id'] = self.gbsdbbuild_project_id
+        dic['dashboard_gbsbuild_num'] = self.dashboard_gbsbuild_num
+        dic['trigger_category'] = self.trigger_category
 
         return dic
 
+    def get_build_pkg_data(self):
+        print '----start get_build_pkg_data-----------------------------------'
+
+        build_pkg_data = []
+        for git_repository in self.build_pkg_list:
+            git_commit_id = self.build_pkg_list[git_repository]
+            tmpdir = tempfile.mkdtemp(prefix=os.getenv('WORKSPACE')+'/')
+            atexit.register(shutil.rmtree, tmpdir)
+            prjdir = os.path.join(tmpdir, git_repository)
+            if not clone_gitproject(git_repository, prjdir):
+                print >> sys.stderr, 'Error cloning %s' %git_repository
+                return 1
+            mygit = Git(prjdir)
+            mygit.checkout(git_commit_id)
+            git_committer =  mygit.show(git_commit_id).split('\n')[1].split(':')[1].lstrip(' ')
+            git_commit_message = mygit.show(git_commit_id).split('\n')[4]
+
+            build_pkg_data.append({'git_repository':git_repository})
+            build_pkg_data[-1]['git_commit_id'] = git_commit_id
+            build_pkg_data[-1]['git_committer'] = git_committer
+            build_pkg_data[-1]['git_commit_message'] = git_commit_message
+
+            build_pkg_data[-1]['is_remove'] = 0
+
+        print 'build_pkg_data info for gbsbuld: %s' %build_pkg_data
+        return build_pkg_data
+            
 
     def ___build_pkg_list_apply(self,manifest_content):
         print '----start ___build_pkg_list_apply-----------------------------------'
@@ -394,16 +454,66 @@ class GBSBuild:
         f.close()
 
 
+    def _get_gbs_build_result_status(self,repo,arch):
+        print '----start _get_gbs_build_result_status-----------------------------------'
+
+        profile_list=os.listdir(self.build_root+'/local/repos/')
+        for profile in profile_list:
+            if profile.find(repo) != -1:
+                profile_path=profile
+
+        index_file=self.build_root+'/local/repos/'+profile_path+'/'+arch+'/index.html'
+
+        f = open(index_file,'rb')
+        build_result=f.read()
+        f.close()
+
+        #summary is contents between 'Build Status Summary' and 'Build Statis Details'
+        summary=build_result.split('Build Status Summary')[1].split('Build Statis Details')[0]
+
+        total_pkg_num=summary.split('<td>')[1]
+        succeeded_pkg_num=summary.split('<td>')[2]
+        export_err_pkg_num=summary.split('<td>')[3]
+        expansion_err_pkg_num=summary.split('<td>')[4]
+        build_err_pkg_num=summary.split('<td>')[5]
+
+        if total_pkg_num == succeeded_pkg_num:
+            status = 'S'
+            status_reason = 'Succeeded'
+        elif export_err_pkg_num != '0':
+            status = 'F'
+            status_reason = 'Export Error'
+        elif expansion_err_pkg_num != '0':
+            status = 'F'
+            status_reason = 'Expansion Error'
+        else:
+            status = 'F'
+            status_reason = 'Build Error'
+
+        print 'GBS Build status: %s, status_reason: %s' %(status,status_reason)
+
+        return (status,status_reason)
+
+
     def _do_repo_init_sync(self,repo):
         print '----start _do_repo_init_sync-----------------------------------'
         repo_src_root=self.prj_src_root+'/'+repo
+
+        if not os.path.exists(self.prj_src_root):
+            os.mkdir(self.prj_src_root)
+
         if not os.path.exists(repo_src_root):
             os.mkdir(repo_src_root)
         os.chdir(repo_src_root)
 
+        if os.path.exists(os.path.join(repo_src_root,'.repo')):
+            repo_init_clear_files=os.path.join(repo_src_root,'.repo/manifests*')
+            cmd = 'sudo rm -rf '+repo_init_clear_files
+            subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+
         #add '-u' option
-#psk        repo_init_arg = ' -u ssh://%s:%s/scm/manifest' %(os.getenv('GERRIT_HOSTNAME_EXTERNAL'), 
-        repo_init_arg = ' -u ssh://%s:%s/scm/manifest' %(os.getenv('GERRIT_HOSTNAME_EXTERNAL'),
+        repo_init_arg = ' -u ssh://%s:%s/scm/manifest' %(os.getenv('GERRIT_HOSTNAME_EXTERNAL'), 
+#        repo_init_arg = ' -u ssh://%s:%s/scm/manifest' %(os.getenv('PUBLIC_GERRIT_HOSTNAME'),
                                                          os.getenv('GERRIT_SSHPORT'))
         #add '-b' option
         repo_init_arg += ' -b '+self.t_branch
@@ -431,34 +541,6 @@ class GBSBuild:
         self.__update_gbs_conf_for_one_profile_repo(repo)
 
 
-    def __is_gbs_fullbuild_result_fail(self,repo,arch):
-        print '----start __is_gbs_fullbuild_result_fail-----------------------------------'
-
-        profile_list=os.listdir(self.build_root+'/local/repos/')
-        for profile in profile_list:
-            if profile.find(repo) != -1:
-                profile_path=profile
-
-        index_file=self.build_root+'/local/repos/'+profile_path+'/'+arch+'/index.html'
-
-        f = open(index_file,'rb')
-        build_result=f.read()
-        f.close()
-
-        #summary is contents between 'Build Status Summary' and 'Build Statis Details'
-        summary=build_result.split('Build Status Summary')[1].split('Build Statis Details')[0]
-
-        total_pkg_num=summary.split('<td>')[1]
-        succeeded_pkg_num=summary.split('<td>')[2]
-
-        if total_pkg_num == succeeded_pkg_num:
-            print 'GBS fullbuild succeeded'
-            return 0
-        else:
-            print 'There are errors on gbs fullbuild'
-            return 1
-
-
     def __find_binary_list_arg(self,repo):
         """ Find argument of --binary-list : which is same as to find spec file name in build_pkg_list"""
         print '----start __find_binary_list_arg-----------------------------------'
@@ -495,17 +577,6 @@ class GBSBuild:
 
         subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
 
-        if self.__is_gbs_fullbuild_result_fail(repo,arch):
-            # TRIGGER NEXT BUILD-MONITOR
-#            update_message="GBS Fullbuild Failed"
-#            if len(update_message) < 119:
-#                trigger_next("BUILD-MONITOR", \
-#                        {'bm_stage':'GBSFULLBUILD_SNAPSHOT',
-#                         'snapshot_name':ref_profile['ref_snapshot_build_id'],
-#                         'gbsfullbuild_string': update_message})
-
-            raise LocalError('There are errors on GBS fullbuild for repo:%s, arch:%s' %(repo,arch))
-
 
     def do_gbs_build(self):
         print '----start do_gbs_build-----------------------------------'
@@ -522,6 +593,10 @@ class GBSBuild:
                 print 'OBS Project: %s, repository: %s, architecture: %s gbs build start'\
                        %(self.obs_prj,repo,arch)
                 self._do_repo_arch_gbs_fullbuild(repo,arch)
+                if self._get_gbs_build_result_status(repo,arch)[0] == 'S':
+                    print 'GBS fullbuild succeeded for repo:%s , arch:%s' %(repo,arch)
+                else:
+                    raise LocalError('GBS fullbuild failed')
 
 
     def copy_build_results_to_dl_server(self):
@@ -551,11 +626,11 @@ class GBSBuild:
             if not os.path.exists(obs_liverepo):
                 os.mkdir(obs_liverepo)
 
-        obs_liverepo=os.path.join(obs_liverepo,self.gbsdbbuild_project_id)
+        obs_liverepo=os.path.join(obs_liverepo,self.dashboard_gbsbuild_num)
         if not os.path.exists(obs_liverepo):
             os.mkdir(obs_liverepo)
 
-        sync_dest=os.path.join(sync_dest,self.gbsdbbuild_project_id)
+        sync_dest=os.path.join(sync_dest,self.dashboard_gbsbuild_num)
 
         buildlogs_root = os.path.join(obs_liverepo,'buildlogs')
         os.mkdir(buildlogs_root)
@@ -622,4 +697,62 @@ class GBSBuild:
         print 'rsync sync_out_dir: %s, sync_dest: %s' %(sync_out_dir, sync_dest)
         sync(sync_out_dir, sync_dest)
 
+        return sync_out_dir
+
+    def get_rpm_info_from_gbs_build_log(self,live_out_dir,repo,arch):
+        print '----start get_rpm_info_from_gbs_build_log-----------------------------------'
+
+        rpm_file_info = []
+
+        manifest_file = self.prj_src_root+'/'+repo+'/.repo/manifests/'+self.profile+'/'+repo+'/projects.xml'
+        build_log_dir=os.path.join(live_out_dir,'buildlogs',repo,arch)
+        build_rpm_root=os.path.join(live_out_dir,repo)
+
+        with open(manifest_file,"rb") as f:
+            manifest_content=f.read()
+
+        for each_line in manifest_content.split('\n'):
+            if each_line.find('<project name=') != -1:
+                #processing for each git_repository
+                #get spec file lists
+                git_repository=each_line.split('<project name="')[1].split('"')[0]
+                git_commit_id=each_line.split('revision="')[1].split('"')[0]
+                git_repo_dir=os.path.join(self.prj_src_root,repo,git_repository)
+                spec_files = []
+                for each_file in os.listdir(os.path.join(git_repo_dir,'packaging')):
+                    if each_file.endswith('.spec'):
+                        spec_files.append(each_file.replace('.spec',''))
+                print "git_repository: %s, spec file list: %s" %(git_repository,spec_files)
+
+                #get rpm name
+                for spec_file in spec_files:
+                    spec_buildlog_name=spec_file+'.buildlog.txt'
+#                    if buildlog_file in succeeded_build_log_list:
+                    if os.path.exists(os.path.join(build_log_dir,'succeeded')):
+                        for buildlog_file in os.listdir(os.path.join(build_log_dir,'succeeded')):
+                            if spec_buildlog_name == re.sub(r'-\d+|\.\d+','',buildlog_file):
+                                build_status = 'S'
+                                (build_time, built_rpm_files) = find_info_from_one_buildlog(os.path.join(build_log_dir,'succeeded',buildlog_file),spec_file)
+                                for rpm_file_name in built_rpm_files:
+                                    rpm_file_info.append({'git_repository':git_repository})
+                                    rpm_file_info[-1]['git_commit_id']=git_commit_id
+                                    rpm_file_info[-1]['spec_file_name']=spec_file
+                                    rpm_file_info[-1]['rpm_file_name']=rpm_file_name
+                                    rpm_file_info[-1]['build_time']=build_time
+                                    rpm_file_info[-1]['status']=build_status
+                                    rpm_file_info[-1]['status_reason']=''
+                    if os.path.exists(os.path.join(build_log_dir,'failed')):
+                        for buildlog_file in os.listdir(os.path.join(build_log_dir,'failed')):
+                            if spec_buildlog_name == re.sub(r'-\d+|\.\d+','',buildlog_file):
+                                build_status = 'F'
+                                status_reason = 'Build Failed'
+                                rpm_file_info.append({'git_repository':git_repository})
+                                rpm_file_info[-1]['git_commit_id']=git_commit_id
+                                rpm_file_info[-1]['spec_file_name']=spec_file
+                                rpm_file_info[-1]['rpm_file_name']=''
+                                rpm_file_info[-1]['build_time']=''
+                                rpm_file_info[-1]['status']=build_status
+                                rpm_file_info[-1]['status_reason']=status_reason
+
+        return rpm_file_info
 
index c95dea6..ad309b8 100644 (file)
@@ -1519,6 +1519,162 @@ def update_gbsfullbuild_snapshot(content):
     query_data = (gbsfullbuild_string, snapshot)
     buildmonitor_db.do_query(query, query_data)
 
+def start_gbsdbbuild(content):
+    dashboard_gbsbuild_num = content.get('dashboard_gbsbuild_num')
+    trigger_category = content.get('trigger_category')
+    obs_prj = content.get('obs_prj')
+
+    #get info_project_id
+    query = "SELECT id FROM info_project WHERE project_name = %s"
+    query_data = (obs_prj,)
+    info_project_id = buildmonitor_db.get_value_from_query_data(query, query_data)
+
+    #set gbs_build_project
+    query = "INSERT INTO gbs_build_project (info_project_id, dashboard_gbsbuild_num, trigger_category, stage) VALUES(%s, %s, %s, 'Building')"
+    query_data = (info_project_id,dashboard_gbsbuild_num,trigger_category)
+    buildmonitor_db.do_query(query, query_data)
+
+    #get gbs_build_project_id
+    query = "SELECT id FROM gbs_build_project WHERE dashboard_gbsbuild_num = %s"
+    query_data = (dashboard_gbsbuild_num,)
+    gbs_build_project_id = buildmonitor_db.get_value_from_query_data(query, query_data)
+
+def update_gbsdbbuild_target(content):
+    repository = content.get('repository')
+    architecture = content.get('architecture')
+    start_time = content.get('bm_start_datetime')
+    end_time = content.get('bm_end_datetime')
+    dashboard_gbsbuild_num = content.get('dashboard_gbsbuild_num')
+    status = content.get('gbs_build_status')
+    status_reason = content.get('status_reason')
+
+    #get dashboard_gbsbuild_num from gbs_build_project, repository_id from build_repository_name
+    #and arch_id from build_arch_name
+    query = "SELECT gbp.id, brn.id, ban.id "\
+            "FROM gbs_build_project gbp, build_repository_name brn, build_arch_name ban "\
+            "WHERE gbp.dashboard_gbsbuild_num= %s and brn.repository= %s and ban.arch = %s"
+    query_data = (dashboard_gbsbuild_num,repository,architecture)
+    query_result = buildmonitor_db.get_multi_values_from_query_data(query, query_data)
+    (gbs_build_project_id,repository_id,arch_id) = query_result[0]
+
+    #set gbs_build_target
+    query = "INSERT INTO gbs_build_target"\
+            "(gbs_build_project_id, repository, arch, start_time, end_time, status, status_reason)"\
+            " VALUES(%s, %s, %s, %s, %s, %s, %s)"
+    query_data = (gbs_build_project_id,repository_id,arch_id,start_time,end_time,status,status_reason)
+    buildmonitor_db.do_query(query, query_data)
+
+def update_gbs_build_package(content):
+    dashboard_gbsbuild_num = content.get('dashboard_gbsbuild_num')
+    bm_repo = content.get('bm_repo')
+    bm_arch = content.get('bm_arch')
+    bm_pkg_git_repo_list = content.get('bm_pkg_git_repo_list')
+    bm_pkg_git_commit_id_list = content.get('bm_pkg_git_commit_id_list')
+    bm_pkg_spec_name_list = content.get('bm_pkg_spec_name_list')
+    bm_pkg_rpm_name_list = content.get('bm_pkg_rpm_name_list')
+    bm_pkg_build_time_list = content.get('bm_pkg_build_time_list')
+    bm_pkg_status_list = content.get('bm_pkg_status_list')
+    bm_pkg_status_reason_list = content.get('bm_pkg_status_reason_list')
+    bm_trg_count = content.get('bm_trg_count')
+    bm_pkg_count = content.get('bm_pkg_count')
+    BM_PKG_LIMIT = content.get('BM_PKG_LIMIT')
+
+    # get gbs_build_target_id
+    query = "SELECT gbt.id FROM gbs_build_target gbt, "\
+            "gbs_build_project gbp, build_repository_name brn, build_arch_name ban "\
+            "WHERE gbp.dashboard_gbsbuild_num= %s and brn.repository= %s and ban.arch = %s " \
+            "and gbt.gbs_build_project_id = gbp.id and gbt.repository = brn.id and gbt.arch = ban.id"
+    query_data = (dashboard_gbsbuild_num,bm_repo,bm_arch)
+    gbs_build_target_id = buildmonitor_db.get_value_from_query_data(query, query_data)
+    print "gbs_build_target_id: %s" %gbs_build_target_id
+
+    query_data=()
+    query = ""
+    for each_pkg_git_repo,each_pkg_commit_id in zip(bm_pkg_git_repo_list,bm_pkg_git_commit_id_list):
+        query += "(SELECT gc.id,gc.commit_id,gr.name FROM git_commit gc, git_repository gr " \
+                "WHERE gr.name = %s and gc.commit_id = %s) union "
+        query_data+=(each_pkg_git_repo,each_pkg_commit_id,)
+    query = query.rstrip("union ")
+
+    git_id_dic={}
+    query_result = buildmonitor_db.get_multi_values_from_query_data(query, query_data)
+    for git_id,git_commit_id,git_repository in query_result:
+        print "git repo db: git_id: %s git_commit_id: %s git_repository: %s"\
+               %(git_id,git_commit_id,git_repository)
+        git_id_dic[git_repository]=git_id
+
+    # bulk insert
+    query = "INSERT INTO gbs_build_package (gbs_build_target_id, git_id, " \
+            "spec_name, rpm_name, build_time, status, status_reason) " \
+            "VALUES(%s, %s, %s, %s, %s, %s, %s)"
+
+    query_list = []
+    for each_pkg_git_repo,each_pkg_spec_name,each_pkg_rpm_name,each_pkg_build_time,each_pkg_status,each_pkg_status_reason in zip(bm_pkg_git_repo_list,bm_pkg_spec_name_list,bm_pkg_rpm_name_list,bm_pkg_build_time_list,bm_pkg_status_list,bm_pkg_status_reason_list):
+        query_list.append((gbs_build_target_id, git_id_dic[each_pkg_git_repo], each_pkg_spec_name,
+                           each_pkg_rpm_name, each_pkg_build_time, each_pkg_status,
+                           each_pkg_status_reason))
+
+    buildmonitor_db.do_many_query(query, query_list)
+
+def update_gbsdbbuild_snapshot(content):
+    bm_stage = content.get('bm_stage')
+    snapshot_name = content.get('snapshot_name')
+    snapshot_url = content.get('snapshot_url')
+    start_time = content.get('bm_start_datetime')
+    end_time = content.get('bm_end_datetime')
+    status_reason = content.get('status_reason')
+    dashboard_gbsbuild_num = content.get('dashboard_gbsbuild_num')
+
+    #get gbs_build_project_id
+    query = "SELECT id FROM gbs_build_project WHERE dashboard_gbsbuild_num = %s"
+    query_data = (dashboard_gbsbuild_num,)
+    gbs_build_project_id = buildmonitor_db.get_value_from_query_data(query, query_data)
+
+    if bm_stage == 'GBSDBBuild_Snapshot_Start':
+        #set gbs_build_snapshot
+        query = "INSERT INTO gbs_build_snapshot "\
+                "(gbs_build_project_id, snapshot_name, snapshot_url, start_time, status) "\
+                "VALUES(%s, %s, %s, %s, 'R')"
+        query_data = (gbs_build_project_id,snapshot_name,snapshot_url,start_time)
+        buildmonitor_db.do_query(query, query_data)
+
+    elif bm_stage == 'GBSDBBuild_Snapshot_End' or bm_stage == 'GBSDBBuild_Snapshot_Failed':
+        if bm_stage == 'GBSDBBuild_Snapshot_End':
+            status = 'S'
+        elif bm_stage == 'GBSDBBuild_Snapshot_Failed':
+            status = 'F'
+        #update status of gbs_build_snapshot
+        query = "UPDATE gbs_build_snapshot SET end_time = %s, status = %s, status_reason = %s WHERE gbs_build_project_id = %s"
+        query_data = (end_time,status,status_reason,gbs_build_project_id)
+        buildmonitor_db.do_query(query, query_data)
+
+def update_gbsdbbuild_image(content):
+    fields = content.get('fields')
+    bm_stage = content.get('bm_stage')
+    image_status = content.get('status')
+    start_time = content.get('bm_start_datetime')
+    end_time = content.get('bm_end_datetime')
+    build_id = content.get('build_id')
+    image_size = content.get('bm_img_size')
+
+    repository = fields['repo']
+    ks_name = fields['name']
+    image_url = fields['url']
+
+    #get gbs_build_snapshot_id and repository_id
+    query = "SELECT gbs.id, brn.id FROM gbs_build_snapshot gbs, build_repository_name brn "\
+            "WHERE gbs.snapshot_name = %s and brn.repository = %s"
+    query_data = (build_id,repository)
+    query_result = buildmonitor_db.get_multi_values_from_query_data(query, query_data)
+    (gbs_build_snapshot_id,repository_id) = query_result[0]
+
+    #set gbs_build_image
+    query = "INSERT INTO gbs_build_image "\
+            "(gbs_build_snapshot_id, repository, ks_name, start_time, end_time, status, image_size, image_url) "\
+            "VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
+    query_data = (gbs_build_snapshot_id,repository_id,ks_name,start_time,end_time,image_status,image_size,image_url)
+    buildmonitor_db.do_query(query, query_data)
+
 def update_unresolvable_broken_packages(project, unresolvable_broken_packages):
 
     if unresolvable_broken_packages is None:
@@ -2830,6 +2986,26 @@ def main():
         print "[%s][%s]\n" % (__file__, bm_stage)
         update_gbsfullbuild_snapshot(content)
 
+    elif bm_stage == "GBSDBBuild_Start":
+        print "[%s][%s]\n" % (__file__, bm_stage)
+        start_gbsdbbuild(content)
+
+    elif bm_stage == "GBSDBBuild_Target":
+        print "[%s][%s]\n" % (__file__, bm_stage)
+        update_gbsdbbuild_target(content)
+
+    elif bm_stage == "GBSDBBuild_Snapshot_Start" or bm_stage == "GBSDBBuild_Snapshot_End" or bm_stage == "GBSDBBuild_Snapshot_Failed":
+        print "[%s][%s]\n" % (__file__, bm_stage)
+        update_gbsdbbuild_snapshot(content)
+
+    elif bm_stage == "GBSDBBuild_Image":
+        print "[%s][%s]\n" % (__file__, bm_stage)
+        update_gbsdbbuild_image(content)
+
+    elif bm_stage == "GBSDBBuild_Package":
+        print "[%s][%s]\n" % (__file__, bm_stage)
+        update_gbs_build_package(content)
+
     #=======================================================
     # [POST] Image / Post_Image => N/A
     #=======================================================
index da3953d..1c54189 100644 (file)
@@ -16,125 +16,72 @@ from common.gerrit import Gerrit, get_gerrit_event, GerritError, is_ref_deleted
 from common.buildservice import BuildService
 from common.utils import sync
 from common.gbsutils import GBSBuild
-from common.buildtrigger import trigger_info, trigger_next
+from common.buildtrigger import trigger_next
+
 
 # prepare related global variables
 workspace = os.getenv('WORKSPACE')
 basic_url= os.getenv('URL_PUBLIC_REPO_BASE') + '/snapshots/tizen/'
-#basic_url = 'http://download.tizen.org/snapshots/tizen/'
-gbs_meta_default_profile = os.getenv('GBS_META_DEFAULT_PROFILE')
-SUPPORT_PROFILES = os.getenv('SUPPORT_PROFILES')
-ARCHITECTURE = os.getenv('ARCHITECTURE')
-gbsbuild_workspace = '/srv/gbs/gbsbuild-ROOT/'
+#gbsbuild_workspace = '/data/gbsbuild-ROOT/'
+gbsbuild_workspace = '/srv/data/gbsbuild-ROOT/'
 BUILD_ROOT = gbsbuild_workspace + '/GBS-ROOT/'
 src_root = gbsbuild_workspace + '/SRC-ROOT/'
 LIVE_ROOT = gbsbuild_workspace + '/live/'
-gbs_default_build_arg='timeout 6h gbs build --threads=16 --define "jobs 8" --define "_smp_mflags -j8" --baselibs --clean-once'
-mic_default_build_arg='sudo mic --non-interactive cr auto -o '+workspace+'/mic/out '
-ref_profile={}
-ALL_REPO_ARCH_BUILD = os.getenv('ALL_REPO_ARCH_BUILD')
-rpm_arch_type_list = 'aarch64 armv7l i586 i686 noarch vanish x86_64'
-RSYNC_LIVE = os.getenv('IMG_SYNC_DEST_BASE') + '/live'
 
 class LocalError(Exception):
     """Local error exception."""
     pass
 
-
-def _check_mount_existance():
-    print '----start _check_mount_existance-----------------------------------'
-    umount_list = ''
-
-    cmd='mount'
-    result=subprocess.check_output(cmd,shell=True)
-    for line in result.split('\n'):
-        if line.find(BUILD_ROOT.replace('//','/')) != -1:
-            umount_list+=line.split('on ')[1].split(' type')[0]+' '
-
-    if umount_list:
-        print 'There exist mount which is not unmouted by previous gbs fullbuild job.\n and these lists are like below:%s' %umount_list
-        cmd='sudo umount '+umount_list
-        subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
-    else:
-        print 'No mount is left by gbsfullbuild'
-
-
-    now = datetime.now()
-
-def retrive_args_from_trigger(content):
-
-    data = {}
-
-    # Adjust
-    data["project"]      = content.get('main_project')
-    data["snapshot_num"] = content.get('source_snapshot').split("_")[-1]
-    data["full_dep_sel"] = "full build" if content.get('gbs_type') == "Full Build" else "dependency build"
-    data["new_packages"] = ""
-    data["rm_packages"] = ""
-    data["rm_image"] = "Not remove"
-    for x in content.get('packages'):
-        git_path = x.keys()[0]
-        commitid = x[git_path].get('commit')
-        include_build = 1 if x[git_path].get('build') == 'include' else 0
-        include_image = 1 if x[git_path].get('image') == 'include' else 0
-        if include_build == 0:
-            data["rm_packages"] = "%s %s" % (data["rm_packages"], "%s" % (git_path) )
-        elif include_build == 1:
-            data["new_packages"] = "%s %s" % (data["new_packages"], "%s %s" % (git_path, commitid) )
-        if include_image == 0:
-            data["rm_image"] = "Remove"
-
-    return data
-
 def main():
     """script entry point"""
 
     print '---[JOB STARTED]----------------------------------------'
 
-    if os.getenv('TRIGGER_INFO'):
-        content = trigger_info(os.getenv('TRIGGER_INFO'))
-        data = retrive_args_from_trigger(content)
-        obs_prj      = data.get('project')
-        snapshot_num = data.get('snapshot_num')
-        full_dep_sel = data.get('full_dep_sel')
-        new_pkg_list = data.get('new_packages')
-        rm_pkg_list  = data.get('rm_packages')
-        rm_from_img  = data.get('rm_image')
-    else:
-        obs_prj = os.getenv('PROJECT')
-        snapshot_num = os.getenv('SNAPSHOT')
-        full_dep_sel = os.getenv('FULL_DEP_SEL')
-        new_pkg_list = os.getenv('NEW_PKG_LIST')
-        rm_pkg_list = os.getenv('RM_PKG_LIST')
-        rm_from_img = os.getenv('RM_FROM_IMG')
-
-    now = datetime.now()
-    gbsdbbuild_project_id = '%04d%02d%02d.%02d%02d%02d' %(now.year,now.month,now.day,now.hour,now.minute,now.second)
-    _check_mount_existance()
-
-    if os.path.exists(BUILD_ROOT):
-        cmd = 'sudo rm -rf '+BUILD_ROOT
-        subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+    obs_prj = os.getenv('PROJECT')
+    snapshot_num = os.getenv('SNAPSHOT')
+    trigger_category = os.getenv('TRIGGER_CATEGORY')
+    new_pkg_list = os.getenv('NEW_PKG_LIST')
+    rm_pkg_list = os.getenv('RM_PKG_LIST')
+    rm_from_img = os.getenv('RM_FROM_IMG')
 
-    if not os.path.exists(gbsbuild_workspace):
-        os.mkdir(gbsbuild_workspace)
+    global buildmonitor_enabled
+    buildmonitor_enabled = os.getenv("BUILDMONITOR_ENABLED", "0") != "0"
+    print 'buildmonitor_enabled(%s)\n' % (buildmonitor_enabled)
 
-    if not os.path.exists(src_root):
-        os.mkdir(src_root)
+    now = datetime.now()
+    dashboard_gbsbuild_num = '%04d%02d%02d.%02d%02d%02d' %(now.year,now.month,now.day,now.hour,now.minute,now.second)
 
-    original_dir=os.getcwd()
+#    original_dir=os.getcwd()
 
     print 'gbs fullbuild start which links to obs project: %s' %obs_prj
 
-    gbsbuild = GBSBuild(gbsdbbuild_project_id, obs_prj, basic_url, snapshot_num, full_dep_sel, new_pkg_list, rm_pkg_list, gbsbuild_workspace)
-    gbsbuild.do_gbs_build()
-    gbsbuild.copy_build_results_to_dl_server()
-
-    os.chdir(original_dir)
-
+    gbsbuild = GBSBuild(dashboard_gbsbuild_num, obs_prj, basic_url, snapshot_num, trigger_category, new_pkg_list, rm_pkg_list, gbsbuild_workspace)
     gbsbuild_dic = gbsbuild.convert_gbsbuild_to_dictionary()
     print "gbsbuild_dic : %s" %gbsbuild_dic
 
+    if buildmonitor_enabled:
+        build_pkg_data = gbsbuild.get_build_pkg_data()
+        bm_stage = 'GBSDBBuild_Start'
+        bm_data = {"bm_stage" : bm_stage,
+                   "dashboard_gbsbuild_num" : dashboard_gbsbuild_num,
+                   "trigger_category" : trigger_category,
+                   "obs_prj" : obs_prj,
+                  }
+        trigger_next("BUILD-MONITOR-%s" % bm_stage, bm_data)
+
+#    gbsbuild.do_gbs_build()
+    for repo in gbsbuild_dic['repository']:
+        for arch in gbsbuild_dic['arch_list'][repo]:
+            print 'OBS Project: %s, repository: %s, architecture: %s gbs build start'\
+                   %(gbsbuild_dic['obs_prj'],repo,arch)
+            
+            trigger_next('gbsdbbuild_one_repoarch_build_%s_%s' % (repo, arch),\
+                    {'gbsbuild_dic':gbsbuild_dic,
+                     'repository':repo,
+                     'architecture':arch,
+                     'new_pkg_list':new_pkg_list,
+                     'rm_pkg_list':rm_pkg_list})
+
     # TRIGGER NEXT GBSFULLBUILD-CREATE-SNAPSHOT
     trigger_next("gbsdbbuild-create-snapshot",\
         {'gbsbuild_dic':gbsbuild_dic})
index 898006f..2b5ecf6 100755 (executable)
@@ -34,12 +34,20 @@ from common.backenddb import BackendDB
 from common.snapshot import Snapshot, SnapshotError, snapshot_project_enabled
 from common.utils import make_latest_link
 from common.send_mail import prepare_mail
-from common.gbsutils import GBSBuild
+from common.gbsutils import GBSBuild,gbs_remote_jenkins_build_job
 
 class LocalError(Exception):
     """Local error exception."""
     pass
 
+def bm_update_gbsdbbuild_snapshot_failed(status_reason,dashboard_gbsbuild_num):
+    bm_stage = 'GBSDBBuild_Snapshot_Failed'
+    bm_end_datetime = datetime.datetime.now()
+    bm_data = {"bm_stage": bm_stage,
+               "bm_end_datetime" : str(bm_end_datetime),
+               "dashboard_gbsbuild_num" : dashboard_gbsbuild_num
+              }
+    trigger_next("BUILD-MONITOR-4-%s" %bm_stage, bm_data)
 
 def replace_ks_file_for_gbsdbbuild(kickstart,build_id):
     gbsdbbuild_dl_postfix = os.getenv('GBSDBBUILD_DL_POSTFIX')
@@ -50,7 +58,7 @@ def replace_ks_file_for_gbsdbbuild(kickstart,build_id):
         snapshot_build_id = snapshot_build_id+x+'_'
     snapshot_build_id = snapshot_build_id.rstrip('_')
 
-    gbsdbbuild_project_id = build_id.split('_')[-1]
+    dashboard_gbsbuild_num = build_id.split('_')[-1]
 
     for line in kickstart.split('\n'):
         if line.find('snapshots/tizen') != -1:
@@ -85,7 +93,7 @@ def prepare_trigger_data(images, build_id, path_repo, project,
         trigger_snapdiff[repo_name] = []
         for ksname, kickstart in images[repo_name].ksi.items():
             print 'ksname: %s\nkickstart:%s' %(ksname,kickstart)
-            #temprary
+            #temporary test
 #            if ksname.find('mobile-wayland-armv7l-tm1') == -1 and ksname.find('iot-headless-2parts-armv7l-rpi3') == -1:
 #            if ksname.find('iot-headless-2parts-armv7l-rpi3') == -1:
 #                continue
@@ -173,7 +181,7 @@ def make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic):
 #        local_targets  = snapshot.targets
 #        local_path     = snapshot.path
 #        local_dir      = snapshot.dir
-        local_build_id = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['gbsdbbuild_project_id']
+        local_build_id = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['dashboard_gbsbuild_num']
         local_targets  = create_local_targets(gbsbuild_dic)
         local_dir = os.path.join(os.getenv('GBSDBBUILD_DL_POSTFIX'),'tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'])
         local_path = os.path.join(base_path,local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id)
@@ -185,17 +193,23 @@ def make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic):
         print '\t%s\n\t%s\n\t%s\n\t%s\n\n' % (local_build_id, local_targets, local_path, local_dir)
         sys.stdout.flush()
     except SnapshotError, err:
+        if buildmonitor_enabled:
+            status_reason = "No Snapshot Info"
+            bm_update_gbsdbbuild_snapshot_failed(status_reason,gbsbuild_dic['dashboard_gbsbuild_num'])
         raise LocalError("Error getting snapshot info: %s" % str(err))
 
     targets = local_targets
     live_repo_path = os.path.join(live_repo_base,
-                                  project.replace(':', ':/'),gbsbuild_dic['gbsdbbuild_project_id'])
+                                  project.replace(':', ':/'),gbsbuild_dic['dashboard_gbsbuild_num'])
     parent_snapshot_path = os.path.join(base_path,'snapshots/tizen',
                                         gbsbuild_dic['t_ver']+gbsbuild_dic['profile'],
                                         gbsbuild_dic['snapshot_build_id'])
     repos = {}
     imagedatas = {}
 
+    snapshot_name = local_build_id
+    snapshot_url = os.path.join(os.getenv('URL_PUBLIC_REPO_BASE'),local_dir,gbsbuild_dic['snapshot_build_id'],snapshot_name)
+
     # Convert live repo to download structure
     for repo in targets:
         repomaker = RepoMaker(local_build_id, local_path)
@@ -225,6 +239,9 @@ def make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic):
             img_conf_list = list(img_conf)
             # whether exist package of image-configuration
             if not img_conf_list:
+                if buildmonitor_enabled:
+                    status_reason = "No Snapshot Info"
+                    bm_update_gbsdbbuild_snapshot_failed(status_reason,gbsbuild_dic['dashboard_gbsbuild_num'])
                 #raise LocalError("Image configuration not found in %s" %
                 #        snapshot.path)
                 print "Image configuration not found in %s, repo:%s" %(snapshot_path, repo['Name'])
@@ -251,17 +268,7 @@ def make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic):
                                     os.getenv('GERRIT_FETCH_URL'),
                                     os.getenv('GERRIT_REVIEW_URL'),
                                     live_repo_path, repo['Architectures'])
-        # Check duplicated items
-#        path_list = [ t.keys()[0] for t in manifest_items if t.keys()[0] ]
-#        duplicated = set([x for x in path_list if path_list.count(x) > 1])
-#        if len(duplicated) >= 1:
-#            prepare_mail('error_report_manifest.env', \
-#                         'Snapshot %s have duplicated packages' % (local_build_id), \
-#                         '\nDuplicated items: %s' % duplicated, \
-#                         os.getenv('NOREPLY_EMAIL_SENDER'), \
-#                         os.getenv('MAILINGLIST_SYSTEM').split(','))
-
-    print 'psk-test\n%s' %imagedatas
+
     return {'project': project,
             'repo': repos,
             'repo_path': os.path.join(local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id),
@@ -289,11 +296,32 @@ def main():
     snapshot_name = gbsbuild_dic['snapshot_build_id']
     project = gbsbuild_dic['obs_prj']
 
+    snapshot_name = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['dashboard_gbsbuild_num']
+    snapshot_url = os.path.join(os.getenv('URL_PUBLIC_REPO_BASE'),os.getenv('GBSDBBUILD_DL_POSTFIX'),\
+                       'tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'],\
+                       gbsbuild_dic['snapshot_build_id'],snapshot_name)
+    bm_start_datetime = datetime.datetime.now()
+
+##    gbs_remote_jenkins_build_job({'bm_stage':'GBSDBBuild_Snapshot_Start',
+##                                  'gbsbuild_dic':gbsbuild_dic,
+##                                  'snapshot_name':snapshot_name,
+##                                  'snapshot_url':snapshot_url})
+
     # Init backend database
     redis_host = os.getenv("REDIS_HOST")
     redis_port = int(os.getenv("REDIS_PORT"))
     backenddb = BackendDB(redis_host, redis_port)
 
+    if buildmonitor_enabled:
+        bm_stage = 'GBSDBBuild_Snapshot_Start'
+        bm_data = {"bm_stage": bm_stage,
+                   "snapshot_name": snapshot_name,
+                   "snapshot_url": snapshot_url,
+                   "bm_start_datetime": str(bm_start_datetime),
+                   "dashboard_gbsbuild_num" : gbsbuild_dic['dashboard_gbsbuild_num']
+                  }
+        trigger_next("BUILD-MONITOR-1-%s" % bm_stage, bm_data)
+
     repo_data = make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic)
 
     # prepare trigger data for image creation jobs and snapdiff sync jobs
@@ -310,23 +338,19 @@ def main():
     data = repo_data.copy()
     # remove unused item
     data.pop('imagedata')
+    data['gbsbuild_dic'] = gbsbuild_dic
     trigger_next("gbsfullbuild-buildlogs", data)
 
-    # TRIGGER NEXT BUILD-MONITOR-Success
-#    update_message="Succeeded"
-#    if len(update_message) < 119:
-#        trigger_next("BUILD-MONITOR-Success", \
-#                {'bm_stage':'GBSFULLBUILD_SNAPSHOT',
-#                 'snapshot_name':snapshot_name,
-#                 'gbsfullbuild_string': update_message})
-
-    # TRIGGER NEXT BUILD-MONITOR-Fail
-#    update_message="Image Creation Failed"
-#    if len(update_message) < 119:
-#        trigger_next("BUILD-MONITOR-Failed", \
-#                {'bm_stage':'GBSFULLBUILD_SNAPSHOT',
-#                 'snapshot_name':snapshot_name,
-#                 'gbsfullbuild_string': update_message})
+    if buildmonitor_enabled:
+        bm_stage = 'GBSDBBuild_Snapshot_End'
+        bm_end_datetime = datetime.datetime.now()
+        bm_data = {"bm_stage": bm_stage,
+                   "snapshot_name": snapshot_name,
+                   "snapshot_url": snapshot_url,
+                   "bm_end_datetime" : str(bm_end_datetime),
+                   "dashboard_gbsbuild_num" : gbsbuild_dic["dashboard_gbsbuild_num"]
+                  }
+        trigger_next("BUILD-MONITOR_%s" %bm_stage, bm_data)  # NOTE(review): underscore here, but sibling stages use hyphenated names ("BUILD-MONITOR-1-%s", "BUILD-MONITOR-4-%s") -- confirm the Jenkins job name
 
 
 if __name__ == '__main__':
diff --git a/job_gbsdbbuild_one_repoarch_build.py b/job_gbsdbbuild_one_repoarch_build.py
new file mode 100644 (file)
index 0000000..7e9e821
--- /dev/null
@@ -0,0 +1,183 @@
+#!/usr/bin/env python
+
+import sys
+import os
+import atexit
+import subprocess
+import re
+import glob
+import datetime
+
+from common.gbsutils import GBSBuild,gbs_remote_jenkins_build_job
+from common.buildtrigger import trigger_next,trigger_info
+
+basic_url= os.getenv('URL_PUBLIC_REPO_BASE') + '/snapshots/tizen/'
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+
+def _check_mount_existance(build_root):
+    print '----start _check_mount_existance-----------------------------------'
+    umount_list = ''
+
+    cmd='mount'
+    result=subprocess.check_output(cmd,shell=True)
+    for line in result.split('\n'):
+        if line.find(build_root.replace('//','/')) != -1:
+            umount_list+=line.split('on ')[1].split(' type')[0]+' '
+
+    if umount_list:
+        print 'There exist mount which is not unmouted by previous gbs fullbuild job.\n and these lists are like below:%s' %umount_list
+        cmd='sudo umount '+umount_list
+        subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+    else:
+        print 'No mount is left by gbsfullbuild'
+
+def trigger_bm_for_gbs_build_package(dashboard_gbsbuild_num,bm_pkg_info,bm_repo,bm_arch):
+    print '----start get_bm_data_for_packages-----------------------------------'
+
+    bm_pkg_git_repo_list = []
+    bm_pkg_git_commit_id_list = []
+    bm_pkg_spec_name_list = []
+    bm_pkg_rpm_name_list = []
+    bm_pkg_build_time_list = []
+    bm_pkg_status_list = []
+    bm_pkg_status_reason_list = []
+    bm_trg_count = 0
+    bm_pkg_count = 0
+    BM_PKG_LIMIT = 1100
+
+    for bm_pkg in bm_pkg_info:
+        bm_pkg_git_repo_list.append(bm_pkg['git_repository'])
+        bm_pkg_git_commit_id_list.append(bm_pkg['git_commit_id'])
+        bm_pkg_spec_name_list.append(bm_pkg['spec_file_name'])
+        bm_pkg_rpm_name_list.append(bm_pkg['rpm_file_name'])
+        bm_pkg_build_time_list.append(bm_pkg['build_time'])
+        bm_pkg_status_list.append(bm_pkg['status'])
+        bm_pkg_status_reason_list.append(bm_pkg['status_reason'])
+        # divide the big pkgs
+        bm_pkg_count += 1
+        if bm_pkg_count >= BM_PKG_LIMIT:
+            # for trigger
+            bm_stage = 'GBSDBBuild_Package'
+            bm_data = {"bm_stage" : bm_stage,
+                       "dashboard_gbsbuild_num" : dashboard_gbsbuild_num,
+                       "bm_repo" : bm_repo,
+                       "bm_arch" : bm_arch,
+                       "bm_pkg_git_repo_list" : bm_pkg_git_repo_list,
+                       "bm_pkg_git_commit_id_list" : bm_pkg_git_commit_id_list,
+                       "bm_pkg_spec_name_list" : bm_pkg_spec_name_list,
+                       "bm_pkg_rpm_name_list" : bm_pkg_rpm_name_list,
+                       "bm_pkg_build_time_list" : bm_pkg_build_time_list,
+                       "bm_pkg_status_list" : bm_pkg_status_list,
+                       "bm_pkg_status_reason_list" : bm_pkg_status_reason_list,
+                       "bm_trg_count" : bm_trg_count,
+                       "bm_pkg_count" : bm_pkg_count,
+                       "BM_PKG_LIMIT" : BM_PKG_LIMIT,
+                      }
+            trigger_next("BUILD-MONITOR-2-%s-%s-%s-trg_%s" % (bm_stage, bm_repo, bm_arch, bm_trg_count), bm_data)
+            # clear the data
+            bm_pkg_count = 0
+            bm_trg_count += 1
+            bm_pkg_git_repo_list = []
+            bm_pkg_git_commit_id_list = []
+            bm_pkg_spec_name_list = []
+            bm_pkg_rpm_name_list = []
+            bm_pkg_build_time_list = []
+            bm_pkg_status_list = []
+            bm_pkg_status_reason_list = []
+
+    if bm_pkg_count:  # for rest pkgs: runs AFTER the loop, only when unsent pkgs remain
+        bm_stage = 'GBSDBBuild_Package'
+        bm_data = {"bm_stage" : bm_stage,
+                   "dashboard_gbsbuild_num" : dashboard_gbsbuild_num,
+                   "bm_repo" : bm_repo,
+                   "bm_arch" : bm_arch,
+                   "bm_pkg_git_repo_list" : bm_pkg_git_repo_list,
+                   "bm_pkg_git_commit_id_list" : bm_pkg_git_commit_id_list,
+                   "bm_pkg_spec_name_list" : bm_pkg_spec_name_list,
+                   "bm_pkg_rpm_name_list" : bm_pkg_rpm_name_list,
+                   "bm_pkg_build_time_list" : bm_pkg_build_time_list,
+                   "bm_pkg_status_list" : bm_pkg_status_list,
+                   "bm_pkg_status_reason_list" : bm_pkg_status_reason_list,
+                   "bm_trg_count" : bm_trg_count,
+                   "bm_pkg_count" : bm_pkg_count,
+                   "BM_PKG_LIMIT" : BM_PKG_LIMIT,
+                  }
+        trigger_next("BUILD-MONITOR-2-%s-%s-%s-trg_%s" % (bm_stage, bm_repo, bm_arch, bm_trg_count), bm_data)
+
+
+def main():
+    """script entry point"""
+
+    print '---[JOB STARTED]----------------------------------------'
+
+    content = trigger_info(os.getenv("TRIGGER_INFO"))
+    gbsbuild_dic = content['gbsbuild_dic']
+    repository = content['repository']
+    architecture = content['architecture']
+    new_pkg_list = content['new_pkg_list']
+    rm_pkg_list = content['rm_pkg_list']
+
+    global buildmonitor_enabled
+    buildmonitor_enabled = os.getenv("BUILDMONITOR_ENABLED", "0") != "0"
+    print 'buildmonitor_enabled(%s)\n' % (buildmonitor_enabled)
+
+    original_dir=os.getcwd()
+
+    _check_mount_existance(gbsbuild_dic['build_root'])
+
+    if os.path.exists(gbsbuild_dic['build_root']):
+        cmd = 'sudo rm -rf '+gbsbuild_dic['build_root']
+        subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+
+    if not os.path.exists(gbsbuild_dic['gbsbuild_workspace']):
+        os.mkdir(gbsbuild_dic['gbsbuild_workspace'])
+
+    if not os.path.exists(gbsbuild_dic['gbsbuild_workspace']+'/SRC-ROOT'):
+        os.mkdir(gbsbuild_dic['gbsbuild_workspace']+'/SRC-ROOT')
+
+
+    print 'gbs fullbuild start which links to obs project: %s' %gbsbuild_dic['obs_prj']
+
+    bm_start_datetime = datetime.datetime.now()
+    
+    gbsbuild_sub = GBSBuild(gbsbuild_dic['dashboard_gbsbuild_num'], gbsbuild_dic['obs_prj'], basic_url,\
+        gbsbuild_dic['snapshot_num'], gbsbuild_dic['trigger_category'], new_pkg_list, rm_pkg_list,\
+        gbsbuild_dic['gbsbuild_workspace'])
+    gbsbuild_sub_dic = gbsbuild_sub.convert_gbsbuild_to_dictionary()
+    print "gbsbuild_sub_dic : %s" %gbsbuild_sub_dic
+
+    print 'OBS Project: %s, repository: %s, architecture: %s gbs build start'\
+        %(gbsbuild_dic['obs_prj'],repository,architecture)
+    gbsbuild_sub._do_repo_init_sync(repository)
+    gbsbuild_sub._do_repo_arch_gbs_fullbuild(repository,architecture)
+    (gbs_build_status,gbs_build_status_reason) = gbsbuild_sub._get_gbs_build_result_status(repository,architecture)
+    
+    live_out_dir=gbsbuild_sub.copy_build_results_to_dl_server()
+
+    os.chdir(original_dir)
+
+    if buildmonitor_enabled:
+        bm_end_datetime = datetime.datetime.now()
+        bm_stage = 'GBSDBBuild_Target'
+        bm_data = {"bm_stage": bm_stage,
+                   "repository": repository,
+                   "architecture": architecture,
+                   "bm_start_datetime": str(bm_start_datetime),
+                   "bm_end_datetime" : str(bm_end_datetime),
+                   "gbs_build_status" : gbs_build_status,
+                   "dashboard_gbsbuild_num" : gbsbuild_dic['dashboard_gbsbuild_num']
+                  }
+        trigger_next("BUILD-MONITOR-1-%s" % bm_stage, bm_data)
+
+        bm_pkg_info=gbsbuild_sub.get_rpm_info_from_gbs_build_log(live_out_dir,repository,architecture)
+        print "bm_pkg_info: %s" %bm_pkg_info
+        trigger_bm_for_gbs_build_package(gbsbuild_dic['dashboard_gbsbuild_num'],bm_pkg_info,repository,architecture)
+
+
+if __name__ == '__main__':
+    sys.exit(main())
+
index e249361..1c88ba1 100755 (executable)
@@ -37,12 +37,18 @@ def main():
 
     print '---[JOB STARTED: buildlog ]-------------------------'
 
-    live_repo_base = os.path.join(os.getenv('PATH_LIVE_REPO_BASE'),os.getenv('GBSFULLBUILD_DL_POSTFIX'))
-
     content = trigger_info(os.getenv("TRIGGER_INFO"))
 
+    gbsbuild_dic = content.get("gbsbuild_dic")
     obs_prj = content.get("project")
-    sync_src = os.path.join(live_repo_base,obs_prj.replace(':',':/'),'buildlogs')
+    if gbsbuild_dic:
+        live_repo_base = os.path.join(os.getenv('PATH_LIVE_REPO_BASE'),os.getenv('GBSDBBUILD_DL_POSTFIX'))
+        sync_src = os.path.join(live_repo_base,obs_prj.replace(':',':/'),
+                       gbsbuild_dic['dashboard_gbsbuild_num'],'buildlogs')
+    else:
+        live_repo_base = os.path.join(os.getenv('PATH_LIVE_REPO_BASE'),os.getenv('GBSFULLBUILD_DL_POSTFIX'))
+        sync_src = os.path.join(live_repo_base,obs_prj.replace(':',':/'),'buildlogs')
+
     sync_dest = os.path.join(os.getenv('PATH_REPO_BASE'),content['repo_path'],'builddata/buildlogs')
 
     if os.path.exists(sync_dest):
index 786bc8d..7403879 100755 (executable)
@@ -207,7 +207,6 @@ def main():
     print 'buildmonitor_enabled(%s)\n' % (buildmonitor_enabled)
     if buildmonitor_enabled:
         bm_start_datetime = datetime.datetime.now()
-
     # Check if environment variables are set
     for var in ('WORKSPACE', 'IMG_SYNC_DEST_BASE'):
         if not os.getenv(var):
@@ -216,6 +215,10 @@ def main():
 
     fields = trigger_info(os.getenv('TRIGGER_INFO'))
 
+    # Don't need build monitor for gbsfullbuild
+    if fields['repo_path'].find(os.getenv("GBSFULLBUILD_DL_POSTFIX")) != -1:
+        buildmonitor_enabled = 0
+
     # Check if we've got required fields in TRIGGER_INFO
     for field in ('kickstart', 'name', 'buildid', 'repo_path'):
         if field not in fields:
@@ -238,7 +241,6 @@ def main():
     ksf = os.path.join(outdir, '%s.ks' % name)
     with open(ksf, 'w') as ks_fh:
         ks_fh.write(fields["kickstart"])
-
     if int(os.getenv('USE_VM')):
         vm_image = os.getenv("VM_IMAGE",
                              os.path.join(os.getenv('JENKINS_HOME'),
@@ -287,7 +289,6 @@ def main():
     if ret:
         print 'Error: mic returned %d' % ret
         status = 'failed'
-
     # sync image, logs to SYNC_DEST
     sync_dest = os.path.join(os.getenv('IMG_SYNC_DEST_BASE'),
                              fields['repo_path'])
@@ -354,7 +355,6 @@ def main():
         data['download_num'] =  int(fields['download_num'])
 
     trigger_next("POST-IMAGE-CREATION", data)
-
     if status == 'success':
         print "The build was successful."
         fields["image_xml"] = xml_string
@@ -385,9 +385,8 @@ def main():
             else:
                 print '[%s] %s does not exist!!\n' % (__file__, bm_img_path)
                 bm_img_size = 0
-
             bm_end_datetime = datetime.datetime.now()
-            bm_stage = 'Image'
+            bm_stage = 'GBSDBBuild_Image'
             bm_data = {"bm_stage": bm_stage,
                        "status" : status,
                        "fields" : fields,
@@ -403,7 +402,7 @@ def main():
             fields["status"] = status
             fields["url"] = url
             bm_end_datetime = datetime.datetime.now()
-            bm_stage = 'Image'
+            bm_stage = 'GBSDBBuild_Image'
             bm_data = {"bm_stage": bm_stage,
                        "status" : status,
                        "fields" : fields,
index f8dd2b2..8b50239 100644 (file)
@@ -184,6 +184,7 @@ fi
 %{destdir}/job_obs_project_manager.py
 %{destdir}/job_gbs_dashboard_build.py
 %{destdir}/job_gbsdbbuild_create_snapshot.py
+%{destdir}/job_gbsdbbuild_one_repoarch_build.py
 
 %files common
 %defattr(-,jenkins,jenkins)