dbobj.set_repo('Project-Main', repo)
"""
+import os
import redis
import yaml
import json
attrib["submission"] = self.cur_submission
attrib["OBS_project"] = self.cur_OBS_project
attrib["OBS_staging_project"] = self.cur_OBS_staging_project
- attrib["OBS_package"] = xml.attrib["OBS_package"]
+
+ attrib["OBS_package"] = project_name[project_name.rfind("/")+1:]
+ if "OBS_package" in xml.attrib:
+ attrib["OBS_package"] = xml.attrib["OBS_package"]
+
self._add_mapping("project", project_name, self.cur_branches, attrib)
self.cur_project = ""
file_list.append(os.path.join(path, file))
return file_list
+def git_obs_map_dryrun(local_dir):
+    """Resolve and print every git-to-OBS mapping under *local_dir* (dry run).
+
+    Reads the v1 mapping file ``git-obs-mapping.xml`` plus every v2 mapping
+    file found under ``profiles/``, merges the results and drops overlapping
+    entries, printing each surviving mapping.  Nothing is submitted.
+
+    Returns '' on success, or a string describing the caught error.
+    """
+    # NOTE(review): mapping_prj is assigned but never used below — confirm
+    # whether it was meant to override local_dir, otherwise drop it.
+    mapping_prj = os.getenv("MAPPING_PRJ")
+
+    git_obs_mapping_path = local_dir
+
+    try:
+        mapping_path_v1 = '{0}/git-obs-mapping.xml'.format(git_obs_mapping_path)
+        # get mappings v1
+        mymapping = Mapping(mapping_path_v1)
+        obs_prjs = mymapping.get_submit_mapping('/', None)
+        # get v2 mapping files list
+        mapping_path_v2 = '{0}/profiles/'.format(git_obs_mapping_path)
+        mapping_v2_file_lists = get_xml_file_list(mapping_path_v2)
+        # get mappings v2
+        for file in mapping_v2_file_lists:
+            mymapping_v2 = MappingV2(file)
+            obs_prjs.extend(mymapping_v2.get_submit_mapping('/', None))
+        # remove overlapped items
+        obs_prjs = remove_overlaps(obs_prjs)
+        for obs_prj in obs_prjs:
+            # use != here: 'is not' compares object identity and only
+            # works by accident when the strings happen to be interned
+            print ' %s' % [ obs_prj[x] for x in obs_prj if x != 'config' ]
+    except Exception as err:
+        return '\n\n%s' % repr(err)
+
+    return ''
+
def git_obs_map(gerrit_prj, gerrit_branch=None, gitcache=None, \
gerrit_hostname=None, gerrit_username=None, gerrit_sshport=None):
"""
gerrit_hostname=gerrit_hostname, gerrit_username=gerrit_username, gerrit_sshport=gerrit_sshport):
raise MappingError('Error cloning %s' % mapping_prj)
-
# get mappings v1
mymapping = Mapping(mapping_path_v1)
obs_prjs = mymapping.get_submit_mapping(gerrit_prj, gerrit_branch)
#print '[%s] curr_sr_status_id(%s)\n' % (__file__, curr_sr_status_id)
# Wait 60 seconds to find the info_project_id
- print '[%s] wait 60 sec\n' % (__file__)
- sleep(60)
+ #print '[%s] wait 60 sec\n' % (__file__)
+ #sleep(60)
### for sr_status_detail & sr_stage
# bm_src_project_lst is needed for multi profile
#print '[%s] curr_sr_status_id(%s)\n' % (__file__, curr_sr_status_id)
# Wait 60 seconds to find the info_project_id
- print '[%s] wait 60 sec\n' % (__file__)
- sleep(60)
+ #print '[%s] wait 60 sec\n' % (__file__)
+ #sleep(60)
### for sr_status_detail & sr_stage
# bm_src_project_lst is needed for multi profile
+
+
# load received data
if CONFIG['BASE']['location'] != 'internal':
- trbs_data = os.getenv('file0')
+ trbs_data = os.getenv('TRBS_DATA').replace(' ', '+')
if trbs_data:
- f = open( 'file0', 'r')
- RAW = unicode_to_str(json.loads(f.read()))
- f.close()
-
+ RAW = unicode_to_str(json.loads(base64.b64decode(trbs_data)))
else:
sys.exit( "[Error] TRBS_DATA is empty!\n" )
global status_str
status_str = "Succeeded"
- submitter = info['submitter']
+ submitter = info.get('submitter', '')
projects = '[' + ', '.join(info['projects']) + ']'
message = ''
if submitter:
import datetime
from codebase import *
-def is_test_succeeded(data, is_ref_exist, is_perf_exist):
+def check_test_failure(data, is_ref_exist, is_perf_exist):
- # if ref results exist, compare with them.
+    # if reference results exist, compare against them
if is_ref_exist:
for suite in data['TCT']['SUITE'].keys():
- c = 'pass_case'
- if data['TCT']['SUITE'][suite][c] < data['REF']['SUITE'][suite][c]:
- return 0
- case_tests = ['fail_case', 'block_case', 'na_case']
+ case_tests = ['pass_case', 'fail_case', 'block_case', 'na_case']
for c in case_tests:
- if data['TCT']['SUITE'][suite][c] > data['REF']['SUITE'][suite][c]:
+ if data['TCT']['SUITE'][suite][c] != data['REF']['SUITE'][suite][c]:
return 0
else:
if data['TCT']['COUNT']['total_case'] != data['TCT']['COUNT']['pass_case']:
return 1
-def insert_test_stage(data, MYSQL, sr_id, snapshot, is_ref_exist, is_perf_exist ):
+def insert_test_stage(data, MYSQL, sr_id, is_ref_exist, is_perf_exist ):
status = 'F';
- if is_test_succeeded(data, is_ref_exist, is_perf_exist):
+ if check_test_failure(data, is_ref_exist, is_perf_exist):
status = 'S';
- build_project_id = 0
- sql_data = (snapshot,)
- sql = "SELECT build_project_id FROM build_snapshot WHERE snapshot_name=%s LIMIT 1"
- MYSQL['cursor'].execute(sql, sql_data)
- row = MYSQL['cursor'].fetchall()
- if len(row) > 0:
- build_project_id = row[0]['build_project_id']
-
cur_time = datetime.datetime.now()
- sql_data = (sr_id, cur_time, cur_time, status, build_project_id, status)
+ sql_data = (sr_id, cur_time, cur_time, status, status)
sql = "INSERT INTO sr_stage (sr_status_id, info_stage_id, "\
" stage_start_time, stage_end_time, stage_status, build_project_id)"\
- " VALUES (%s, 101, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE stage_status=%s"
+ " VALUES (%s, 101, %s, %s, %s, 0) ON DUPLICATE KEY UPDATE stage_status=%s"
pprint.pprint(sql)
pprint.pprint(sql_data)
MYSQL['cursor'].execute(sql, sql_data)
print "[ERROR] MYSQL : %s (%s)" % (str(e), sql)
- snapshot = CONFIG['RAW']['TCT']['environment']['build_id']
- insert_test_stage(CONFIG['RAW'], MYSQL, DINFO['sr_status_id'], snapshot, isExistRef , isExistPerf)
+ insert_test_stage(CONFIG['RAW'], MYSQL, DINFO['sr_status_id'] , isExistRef , isExistPerf)
exit(0)