SPIN update 31/143331/1
author     hyokeun <hyokeun.jeon@samsung.com>
Wed, 9 Aug 2017 09:37:47 +0000 (18:37 +0900)
committer  hyokeun <hyokeun.jeon@samsung.com>
Wed, 9 Aug 2017 09:37:47 +0000 (18:37 +0900)
Change-Id: I370c8e19e3ce20272bddbacdb43dc18aa4722154

common/backenddb.py
common/git_obs_mapping.py
common/mapping.py
job_buildmonitor.py
job_litmus_jira_issue_receiver.py
job_repa.py
job_trbs_test_result_receiver.py

index 35b193a..7f094c2 100644 (file)
--- a/common/backenddb.py
+++ b/common/backenddb.py
@@ -30,6 +30,7 @@ Example of usage:
  dbobj.set_repo('Project-Main', repo)
 """
 
+import os
 import redis
 import yaml
 import json
index a561716..a1853b0 100644 (file)
--- a/common/git_obs_mapping.py
+++ b/common/git_obs_mapping.py
@@ -162,7 +162,11 @@ class GitObsMapping:
                 attrib["submission"] = self.cur_submission
                 attrib["OBS_project"] = self.cur_OBS_project
                 attrib["OBS_staging_project"] = self.cur_OBS_staging_project
-                attrib["OBS_package"] = xml.attrib["OBS_package"]
+
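+                # Default the OBS package name to the last path component of the
+                # project name; an explicit OBS_package attribute overrides it.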
+                attrib["OBS_package"] = project_name[project_name.rfind("/")+1:]
+                if "OBS_package" in xml.attrib:
+                    attrib["OBS_package"] = xml.attrib["OBS_package"]
+
                 self._add_mapping("project", project_name, self.cur_branches, attrib)
 
             self.cur_project = ""
index 620bc7e..e113b48 100644 (file)
--- a/common/mapping.py
+++ b/common/mapping.py
@@ -281,6 +281,33 @@ def get_xml_file_list(path):
                 file_list.append(os.path.join(path, file))
     return file_list
 
+def git_obs_map_dryrun(local_dir):
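+    """Parse the git-obs-mapping v1/v2 files under local_dir and print every
+    project mapping found; return '' on success, or a string containing
+    repr() of the error on failure."""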
+
+    mapping_prj = os.getenv("MAPPING_PRJ")
+
+    git_obs_mapping_path = local_dir
+
+    try:
+        mapping_path_v1 = '{0}/git-obs-mapping.xml'.format(git_obs_mapping_path)
+        # get mappings v1
+        mymapping = Mapping(mapping_path_v1)
+        obs_prjs = mymapping.get_submit_mapping('/', None)
+        # get v2 mapping files list
+        mapping_path_v2 = '{0}/profiles/'.format(git_obs_mapping_path)
+        mapping_v2_file_lists = get_xml_file_list(mapping_path_v2)
+        # get mappings v2
+        for file in mapping_v2_file_lists:
+            mymapping_v2 = MappingV2(file)
+            obs_prjs.extend(mymapping_v2.get_submit_mapping('/', None))
+        # remove overlapped items
+        obs_prjs = remove_overlaps(obs_prjs)
+        for obs_prj in obs_prjs:
+            print '    %s' % [obs_prj[x] for x in obs_prj if x != 'config']
+    except Exception as err:
+        return '\n\n%s' % repr(err)
+
+    return ''
+
 def git_obs_map(gerrit_prj, gerrit_branch=None, gitcache=None, \
                 gerrit_hostname=None, gerrit_username=None, gerrit_sshport=None):
     """
@@ -305,7 +332,6 @@ def git_obs_map(gerrit_prj, gerrit_branch=None, gitcache=None, \
                 gerrit_hostname=gerrit_hostname, gerrit_username=gerrit_username, gerrit_sshport=gerrit_sshport):
             raise MappingError('Error cloning %s' % mapping_prj)
 
-
     # get mappings v1
     mymapping = Mapping(mapping_path_v1)
     obs_prjs = mymapping.get_submit_mapping(gerrit_prj, gerrit_branch)
index 62eaac8..496679b 100644 (file)
--- a/job_buildmonitor.py
+++ b/job_buildmonitor.py
@@ -244,8 +244,8 @@ def package_build_for_sr_detail_sr_stage(git_tag, bm_start_datetime,
     #print '[%s] curr_sr_status_id(%s)\n' % (__file__, curr_sr_status_id)
 
     # Wait 60 seconds to find the info_project_id
-    print '[%s] wait 60 sec\n' % (__file__)
-    sleep(60)
+    #print '[%s] wait 60 sec\n' % (__file__)
+    #sleep(60)
 
     ### for sr_status_detail & sr_stage
     # bm_src_project_lst is needed for multi profile
@@ -1446,8 +1446,8 @@ def TRBS_package_build_for_sr_detail_sr_stage(git_tag, bm_start_datetime,
     #print '[%s] curr_sr_status_id(%s)\n' % (__file__, curr_sr_status_id)
 
     # Wait 60 seconds to find the info_project_id
-    print '[%s] wait 60 sec\n' % (__file__)
-    sleep(60)
+    #print '[%s] wait 60 sec\n' % (__file__)
+    #sleep(60)
 
     ### for sr_status_detail & sr_stage
     # bm_src_project_lst is needed for multi profile
index f78e258..7eadf84 100644 (file)
--- a/job_litmus_jira_issue_receiver.py
+++ b/job_litmus_jira_issue_receiver.py
@@ -86,16 +86,15 @@ if __name__ == "__main__":
 
 
 
+
+
     # load received data
     if CONFIG['BASE']['location'] != 'internal':
 
-        trbs_data = os.getenv('file0')
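+        # TRBS_DATA is passed base64-encoded through the environment; restore
+        # any '+' characters that were turned into spaces before decoding.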
+        trbs_data = os.getenv('TRBS_DATA', '').replace(' ', '+')
 
         if trbs_data:
-            f = open( 'file0', 'r')
-            RAW = unicode_to_str(json.loads(f.read()))
-            f.close()
-
+            RAW = unicode_to_str(json.loads(base64.b64decode(trbs_data)))
         else:
             sys.exit( "[Error] TRBS_DATA is empty!\n" )
 
index 21cf78f..46efea3 100644 (file)
--- a/job_repa.py
+++ b/job_repa.py
@@ -89,7 +89,7 @@ def accept_or_reject(build, prerelease_proj, git_tag, state, comment=''):
     global status_str
     status_str = "Succeeded"
 
-    submitter = info['submitter']
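+    # 'submitter' may be missing from the submission info; fall back to ''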
+    submitter = info.get('submitter', '')
     projects = '[' + ', '.join(info['projects']) + ']'
     message = ''
     if submitter:
index 50b58ba..f170ab1 100644 (file)
--- a/job_trbs_test_result_receiver.py
+++ b/job_trbs_test_result_receiver.py
@@ -11,17 +11,14 @@ import re
 import datetime
 from codebase import *
 
-def is_test_succeeded(data, is_ref_exist, is_perf_exist):
+def check_test_failure(data, is_ref_exist, is_perf_exist):
 
-    # if ref results exist, compare with them.
+    # If a reference result exists, compare each suite's counts against it.
     if is_ref_exist:
         for suite in data['TCT']['SUITE'].keys():
-            c = 'pass_case'
-            if data['TCT']['SUITE'][suite][c] < data['REF']['SUITE'][suite][c]:
-                return 0
-            case_tests = ['fail_case', 'block_case', 'na_case']
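+            # every per-suite case count must match the reference exactly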
+            case_tests = ['pass_case', 'fail_case', 'block_case', 'na_case']
             for c in case_tests:
-                if data['TCT']['SUITE'][suite][c] > data['REF']['SUITE'][suite][c]:
+                if data['TCT']['SUITE'][suite][c] != data['REF']['SUITE'][suite][c]:
                     return 0
     else:
         if data['TCT']['COUNT']['total_case'] != data['TCT']['COUNT']['pass_case']:
@@ -37,25 +34,17 @@ def is_test_succeeded(data, is_ref_exist, is_perf_exist):
 
     return 1
 
-def insert_test_stage(data, MYSQL, sr_id, snapshot, is_ref_exist, is_perf_exist ):
+def insert_test_stage(data, MYSQL, sr_id, is_ref_exist, is_perf_exist):
     status = 'F';
-    if is_test_succeeded(data, is_ref_exist, is_perf_exist):
+    if check_test_failure(data, is_ref_exist, is_perf_exist):
         status = 'S';
 
-    build_project_id = 0
-    sql_data = (snapshot,)
-    sql = "SELECT build_project_id FROM build_snapshot WHERE snapshot_name=%s LIMIT 1"
-    MYSQL['cursor'].execute(sql, sql_data)
-    row = MYSQL['cursor'].fetchall()
-    if len(row) > 0:
-        build_project_id = row[0]['build_project_id']
-
     cur_time = datetime.datetime.now()
 
-    sql_data = (sr_id, cur_time, cur_time, status, build_project_id, status)
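+    # build_project_id is hard-coded to 0 in the INSERT below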
+    sql_data = (sr_id, cur_time, cur_time, status, status)
     sql = "INSERT INTO sr_stage (sr_status_id, info_stage_id, "\
         " stage_start_time, stage_end_time, stage_status, build_project_id)"\
-        " VALUES (%s, 101, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE stage_status=%s"
+        " VALUES (%s, 101, %s, %s, %s, 0) ON DUPLICATE KEY UPDATE stage_status=%s"
     pprint.pprint(sql)
     pprint.pprint(sql_data)
     MYSQL['cursor'].execute(sql, sql_data)
@@ -812,7 +801,6 @@ if __name__ == "__main__":
                         print "[ERROR] MYSQL : %s (%s)" % (str(e), sql)
 
 
-    snapshot = CONFIG['RAW']['TCT']['environment']['build_id']
-    insert_test_stage(CONFIG['RAW'], MYSQL, DINFO['sr_status_id'], snapshot, isExistRef , isExistPerf)
+    insert_test_stage(CONFIG['RAW'], MYSQL, DINFO['sr_status_id'], isExistRef, isExistPerf)
 
     exit(0)