From 09fe5ce3804cce740abd64ed2ca72e5a13ec6589 Mon Sep 17 00:00:00 2001 From: mao xiaojuan Date: Fri, 25 May 2018 11:18:41 +0800 Subject: [PATCH] Add pmb scripts for Tools&Services development and test process Change-Id: I80ae8dee7664c3b00a94059ab5c2e12d92abed9c --- debian/control | 6 + debian/jenkins-scripts-pmb.install | 1 + debian/rules | 2 +- packaging/jenkins-scripts.spec | 19 ++- pmb/config.jenkins | 4 + pmb/jiradata.txt | 1 + pmb/job_tool_history_update.py | 254 +++++++++++++++++++++++++++++++++++++ pmb/job_tool_jira_receiver.py | 152 ++++++++++++++++++++++ pmb/job_tool_sender.py | 251 ++++++++++++++++++++++++++++++++++++ 9 files changed, 688 insertions(+), 2 deletions(-) create mode 100644 debian/jenkins-scripts-pmb.install create mode 100755 pmb/config.jenkins create mode 100644 pmb/jiradata.txt create mode 100755 pmb/job_tool_history_update.py create mode 100644 pmb/job_tool_jira_receiver.py create mode 100755 pmb/job_tool_sender.py diff --git a/debian/control b/debian/control index 4697293..9022f06 100644 --- a/debian/control +++ b/debian/control @@ -22,6 +22,7 @@ Depends: ${python:Depends}, jenkins-scripts-trbs, jenkins-scripts-abs, jenkins-scripts-groovyinit, + jenkins-scripts-pmb, python-mysqldb Description: Jenkins build job will call this scripts when build job is triggered by Gerrit/OBS event. @@ -85,3 +86,8 @@ Description: App Build System scripts Depends: ${python:Depends}, jenkins-scripts-common +Package: jenkins-scripts-pmb +Architecture: all +Description: pmb scripts for Tools&Services development and test process +Depends: ${python:Depends}, + jenkins-scripts-common diff --git a/debian/jenkins-scripts-pmb.install b/debian/jenkins-scripts-pmb.install new file mode 100644 index 0000000..c074f22 --- /dev/null +++ b/debian/jenkins-scripts-pmb.install @@ -0,0 +1 @@ +debian/tmp/pmb/* /var/lib/jenkins/jenkins-scripts/pmb/ diff --git a/debian/rules b/debian/rules index f67d143..7134c8a 100755 --- a/debian/rules +++ b/debian/rules @@ -23,7 +23,7 @@ install: build # Installing package mkdir -p $(CURDIR)/debian/tmp/ install -d $(CURDIR)/debian/tmp/ - cp -r job_*.py job_*.groovy dir-purge-tool.sh logs-collector.sh common obs_requests templates scripts dep_graph vis groovy_init_scripts trbs abs $(CURDIR)/debian/tmp/ + cp -r job_*.py job_*.groovy dir-purge-tool.sh logs-collector.sh common obs_requests templates scripts dep_graph vis groovy_init_scripts trbs abs pmb $(CURDIR)/debian/tmp/ binary-indep: build install dh_testdir dh_testroot diff --git a/packaging/jenkins-scripts.spec b/packaging/jenkins-scripts.spec index a0cef5d..67f88d2 100644 --- a/packaging/jenkins-scripts.spec +++ b/packaging/jenkins-scripts.spec @@ -21,6 +21,7 @@ Requires: %{name}-dependsgraph = %{version}-%{release} Requires: %{name}-trbs = %{version}-%{release} Requires: %{name}-abs = %{version}-%{release} Requires: %{name}-groovyinit = %{version}-%{release} +Requires: %{name}-pmb = %{version}-%{release} BuildArch: noarch %define homedir /var/lib/jenkins @@ -104,6 +105,13 @@ Requires: %{name}-common = %{version}-%{release} %description submitobs Isolated job_submitobs to avoid package installation conflicts +%package pmb +Summary: pmb scripts for Tools&Services development and test process +Group: Development/Tools/Building + +%description pmb +pmb scripts for Tools&Services development and test process + %prep %setup -q @@ -111,7 +119,7 @@ Isolated job_submitobs to avoid package installation conflicts %install install -d %{buildroot}%{destdir} -cp -r job_*.py job_*.groovy codebase.py dir-purge-tool.sh 
logs-collector.sh common obs_requests templates scripts vis dep_graph trbs abs %{buildroot}%{destdir}/ +cp -r job_*.py job_*.groovy codebase.py dir-purge-tool.sh logs-collector.sh common obs_requests templates scripts vis dep_graph trbs abs pmb %{buildroot}%{destdir}/ install -d %{buildroot}%{destinitdir} cp -r groovy_init_scripts/{Module*,init.groovy,setup.properties.example} %{buildroot}%{destinitdir}/ install -d %{buildroot}%{homedir} @@ -355,4 +363,13 @@ fi %defattr(-,jenkins,jenkins) %{destdir}/job_submitobs.py +%files pmb +%defattr(-,jenkins,jenkins) +%dir %{destdir}/pmb +%{destdir}/pmb/config.jenkins +%{destdir}/pmb/jiradata.txt +%{destdir}/pmb/job_tool_sender.py +%{destdir}/pmb/job_tool_jira_receiver.py +%{destdir}/pmb/job_tool_history_update.py + %changelog diff --git a/pmb/config.jenkins b/pmb/config.jenkins new file mode 100755 index 0000000..7481172 --- /dev/null +++ b/pmb/config.jenkins @@ -0,0 +1,4 @@ +{ + "job" : "Tool_Prereview_Test_Receiver", + "token" : "123456" +} diff --git a/pmb/jiradata.txt b/pmb/jiradata.txt new file mode 100644 index 0000000..007675d --- /dev/null +++ b/pmb/jiradata.txt @@ -0,0 +1 @@ +{"tool_prereviewtest_status": [{"commit_id": "2018-05-20", "committer": "NULL", "commit_project": "NULL", "commit_event_type": "NULL", "test_date": "2018-05-20_01-40-00", "test_status": "SUCCESS", "test_worker_job": "Tools-gbs-full-test #432", "test_type": "full test", "gerrit": "SPIN", "commit_branch": "NULL"}]} \ No newline at end of file diff --git a/pmb/job_tool_history_update.py b/pmb/job_tool_history_update.py new file mode 100755 index 0000000..9d99e2e --- /dev/null +++ b/pmb/job_tool_history_update.py @@ -0,0 +1,254 @@ +#!/usr/bin/env python + +import sys +import os +import json +import shutil +import subprocess +import json +import pprint +import time +import datetime +import base64 +import MySQLdb +import MySQLdb.cursors +import requests +import re + +sys.path.insert(0, os.path.join(os.getcwd(), 'jenkins-scripts')) + +from bs4 import BeautifulSoup +from common.buildmonitor_db import do_query, get_value_from_query_data, do_many_query +from common.buildmonitor_extention import BuildMonitorExtention + + + +def unicode_to_str(obj): + """convert unicode object to str""" + + if isinstance(obj, list): + return [unicode_to_str(element) for element in obj] + elif isinstance(obj, dict): + return {unicode_to_str(key) : unicode_to_str(value) for key, value in obj.iteritems()} + elif isinstance(obj, unicode): + return obj.encode('utf-8') + else: + return obj + + +class TOOLS_HISTORY_DB(BuildMonitorExtention): + + def __init__(self): + BuildMonitorExtention.__init__(self) + + def update_tools_release_history(self, option): + """write DB // tools_release_history table""" + + print option + + project = option[1] + release_id = option[2] + + table = "tools_release_version" + + ## get release_date + url = "http://download.tizen.org/tools/archive/" + release_url = url + release_id + ##release_date = datetime.datetime.now() + f = requests.get(url, timeout=30) + if f.status_code != 200: + return ("%s url error" % (url)) + + soup = BeautifulSoup(f.text, 'html.parser') + #print soup + + find_version = release_id+"/" + + response = requests.get(url, timeout=30) + + for line in response.iter_lines(): + + line = str(line.rstrip()) + + regex = re.compile(r'[0-9]{1,2}[-][A-Za-z]{0,3}[-][0-9]{4}.[0-9]{2}[:][0-9]{2}') + if find_version in line: + + release_date_list = regex.findall(line) + + if len(release_date_list) >0: + + release_date_temp = release_date_list[0] + print 
"release_date_temp : " , release_date_temp + + day_temp = release_date_temp.split(" ")[0] + time_temp = release_date_temp.split(" ")[1] + + print day_temp, time_temp + day = day_temp.split("-")[0] + month = day_temp.split("-")[1] + year = day_temp.split("-")[2] + + hour = time_temp.split(":")[0] + min = time_temp.split(":")[1] + if int(hour) > 12: + hour = str(int(hour) - 12) + noon = "PM" + else: + noon = "AM" + + a = month + " " + day + " " + year + " " + hour + ":" + min + noon + #a = "Jun 1 2005 1:33PM" + release_date = datetime.datetime.strptime(a,"%b %d %Y %I:%M%p") + print (a, release_date) + + break + try: + query = "SELECT id FROM %s "% (table, ) +"WHERE project = %s AND release_id = %s" + query_data = (project,release_id) + commit_id = get_value_from_query_data(query, query_data) + print "commit_id : ", commit_id + + columns_string = "(`project`,`release_id`,`release_url`, `release_distro`, `release_date`)" + #print columns_string + + ## get distro list form download URL + f = requests.get(release_url, timeout=30) + if f.status_code != 200: + return ("%s url error" % (release_url)) + + soup = BeautifulSoup(f.text, 'html.parser') + print soup + distro_list = ["CentOS","Debian","Fedora","Ubuntu","openSUSE"] + release_distro_temp = [] + current_distro = "" + release_distro = "" + for element in soup.findAll('a'): + distro = str(element.get_text()) + distro_temp = distro.split('_')[0] + if distro_temp in distro_list: + if current_distro != distro_temp: + if not current_distro: + release_distro += distro + else: + release_distro += " \n" + distro + current_distro = distro_temp + else: + release_distro += " ," + distro.split('_')[1] + release_distro += " \n" + print release_distro + + + if commit_id > 0: #update + query = "UPDATE %s SET release_date = \"%s\" WHERE release_id = " % (table, release_date) + "%s" + query_data = (release_id,) + print "update query_data", query_data + do_query(query, query_data) + + else: #insert + query = "INSERT INTO %s %s VALUES " % (table, columns_string) + "(%s, %s, %s, %s, %s)" + query_data = (project,release_id,release_url,release_distro,release_date) + print "insert query_data", query_data + do_query(query, query_data) + + except MySQLdb.IntegrityError: + print "[ERROR]", sql + print ("[ERROR] msg {}".format(error)) + + return release_date + + + def update_each_tool_history(self, option, release_date): + """write DB // tools_release_history table""" + + print option + release_version = option[2] + + table = "tool_release_status" + + # tools , release_version, release_note, tool_version; + + tools_list = ["GBS","MIC","REPA","BMAP-TOOLS"] + + url = "http://download.tizen.org/tools/archive/" + release_version + "/" + f = requests.get(url, timeout=30) + if f.status_code != 200: + return ("%s url error" % (url)) + + soup = BeautifulSoup(f.text, 'html.parser') + + for tools in tools_list: + ## get _release_note + print 'Get Release note : ' , tools + + file_name = "RELEASE_NOTES_" + tools + ".txt" + release_note = "http://download.tizen.org/tools/archive/" + release_version + "/" + file_name + + result=str(soup.findAll('a', attrs={'href': re.compile(file_name)})) + print result + + if file_name not in result: + print 'Can not Find : ' , file_name + continue + else: + response = requests.get(release_note, timeout=30) + if response.status_code != 200: + continue + + ## get tool_version + print 'Get tool version from release note : ' , tools + for line in response.iter_lines(): + line = str(line.rstrip()) + + regex = 
re.compile(r'[0-9]{1,3}[.][0-9]{0,3}.[0-9]{0,3}') + tool_version_list = regex.findall(line) + #print line, tool_version + if len(tool_version_list) >0 and "Release" in line: + print line, tool_version_list[0].replace(" ", "") + + tool_version = tool_version_list[0].replace(" ", "") + break + + try: + query = "SELECT id FROM %s "% (table, ) +"WHERE tools = %s AND tool_version = %s" + query_data = (tools,tool_version) + commit_id = get_value_from_query_data(query, query_data) + print "commit_id : ", commit_id + + columns_string = "(`tools`,`release_version`,`release_note`,`tool_version`,`release_date`)" + print columns_string + + if commit_id > 0: #update + query = "UPDATE %s SET release_version = \"%s\" WHERE tool_version = " % (table, release_version, ) + "%s" + query_data = (tool_version,) + print "update query_data", query_data + #do_query(query, query_data) + + else: #insert + query = "INSERT INTO %s %s VALUES " % (table, columns_string) + "(%s, %s, %s, %s, %s)" + query_data = (tools,release_version,release_note,tool_version, release_date) + print "insert query_data", query_data + do_query(query, query_data) + + except MySQLdb.IntegrityError: + print "[ERROR]", sql + print ("[ERROR] msg {}".format(error)) + + +def main(option): + """The main body""" + destination = option[3] + update_latest_release = option[4] + + tools_history_db = TOOLS_HISTORY_DB() + + if "download.tizen.org" in destination and "YES" in update_latest_release: + # update / insert tools history DB table + print "update tools release history" + release_date = tools_history_db.update_tools_release_history(option) + if type(release_date) == datetime.datetime: + tools_history_db.update_each_tool_history(option, release_date) + else: + print release_date + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/pmb/job_tool_jira_receiver.py b/pmb/job_tool_jira_receiver.py new file mode 100644 index 0000000..b60c14f --- /dev/null +++ b/pmb/job_tool_jira_receiver.py @@ -0,0 +1,152 @@ +#!/usr/bin/env python + + +import sys +import os +import json +import shutil +import subprocess +import json +import pprint +import time +import datetime +import base64 +import MySQLdb +import MySQLdb.cursors + +sys.path.insert(0, os.path.join(os.getcwd(), 'jenkins-scripts')) + +from common.buildmonitor_db import do_query, get_value_from_query_data, do_many_query +from common.buildmonitor_extention import BuildMonitorExtention + + + +def unicode_to_str(obj): + """convert unicode object to str""" + + if isinstance(obj, list): + return [unicode_to_str(element) for element in obj] + elif isinstance(obj, dict): + return {unicode_to_str(key) : unicode_to_str(value) for key, value \ + in obj.iteritems()} + elif isinstance(obj, unicode): + return obj.encode('utf-8') + else: + return obj + + +class TOOLS_DB(BuildMonitorExtention): + + def __init__(self): + BuildMonitorExtention.__init__(self) + + def jira_update(self, table, value): + """write DB // tool_jira_issues table""" + + + for item in value: + + try: + + for k, v in item.items(): + if type(v) == str: + item[k] = v.replace("'", '"') + item[k] = item[k].rstrip('\\') + #print "[INFO] item[k] : ", k , item[k] + + + #print item_data + columns_string = "(`" + "`,`".join(item.keys())+"`)" + values_string = "','".join(map(str, item.values())) + + query = "SELECT id FROM %s WHERE jira_key = "% (table, ) +"%s" + query_data = (item['jira_key'],) + jira_key_id = get_value_from_query_data(query, query_data) + + + if jira_key_id > 0: #update + query = "UPDATE %s SET status = \"%s\" WHERE jira_key 
= " % (table, item['status']) + "%s" + query_data = (item['status'],) + do_query(query, query_data) + + + else: #insert + query = "INSERT INTO %s %s VALUES " % (table, columns_string) + "(%s, %s, %s, %s, %s, %s, %s)" + query_data = (item['status'],item['priority'],item['created'],item['jira_key'],item['title'],item['project'],item['assignee']) + do_query(query, query_data) + + + + except MySQLdb.IntegrityError: + print "[ERROR]", sql + print ("[ERROR] msg {}".format(error)) + + + def prereviewtest_update(self, table, value): + """write DB // tool_jira_issues table""" + + for item in value: + + try: + + for k, v in item.items(): + if type(v) == str: + item[k] = v.replace("'", '"') + item[k] = item[k].rstrip('\\') + print "[INFO] item[k] : ", k , item[k] + + + #print item_data + columns_string = "(`" + "`,`".join(item.keys())+"`)" + values_string = "','".join(map(str, item.values())) + + query = "SELECT id FROM %s "% (table, ) +"WHERE commit_id = %s AND test_type = %s" + query_data = (item['commit_id'],item['test_type']) + #query = "SELECT id FROM %s "% (table, ) +"WHERE commit_id = %s" + #query_data = item['commit_id'] + commit_id = get_value_from_query_data(query, query_data) + print "commit_id : ", commit_id + + if commit_id > 0: #update + query = "UPDATE %s SET test_status = \"%s\" WHERE commit_id = " % (table, item['commit_id']) + "%s" + query_data = (item['test_status'],) + do_query(query, query_data) + + + else: #insert + query = "INSERT INTO %s %s VALUES " % (table, columns_string) + "(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + query_data = (item['commit_id'],item['committer'],item['commit_event_type'],item['test_type'],item['gerrit'],\ + item['commit_project'],item['test_date'],item['test_status'],item['test_worker_job'],item['commit_branch']) + do_query(query, query_data) + + + + except MySQLdb.IntegrityError: + print "[ERROR]", sql + print ("[ERROR] msg {}".format(error)) + + +if __name__ == "__main__": + + + tools_db = TOOLS_DB() + + f = open( 'file0', 'r') + str1 = subprocess.check_output('pwd') + print str1 + RAW = unicode_to_str(json.loads(f.read())) + f.close() + print "RAW = %s" % RAW + #print "RAW.items()= %s" % RAW.items() + + + for table, value in RAW.items(): + print table , value + if "tool_jira_issues" == table: + tools_db.jira_update(table, value) + elif "tool_prereviewtest_status" == table: + tools_db.prereviewtest_update(table, value) + + exit() + + diff --git a/pmb/job_tool_sender.py b/pmb/job_tool_sender.py new file mode 100755 index 0000000..149fba8 --- /dev/null +++ b/pmb/job_tool_sender.py @@ -0,0 +1,251 @@ +#!/usr/bin/python +#-*-coding:utf-8-*- +import os +import sys +import requests +import json +import urllib3 +import dateutil.parser + +class SendData(object): + + def load_file(self, CONFIG, file, type): + # load spin Jenkins config file + if not os.path.isfile(CONFIG[file]): + sys.exit("[ERROR] %s was not found!\n" % CONFIG[file]) + f = open(CONFIG[file], 'r') + CONFIG[type] = json.loads(f.read()) + f.close() + print '[CONFIG] %s - load config file.\n' % type + + def get_data(self, JENKINS, job_name, build_number, type, username, password): + JENKINS_SESSION = requests.Session() + JENKINS_SESSION.trust_env = False + JENKINS['item'] = [] + url = os.getenv(type + '_URL') + print '[INFO] JENKINS url : ', url, '\n' + headers = { 'content-type' : 'application/json', 'Accept-Charset' : 'UTF-8' } + data_url = url + '/job/' + job_name + '/' + build_number + '/api/json?pretty=true' + print '[INFO] Prereview test QUERY : ', data_url, '\n' + JENKINS['raw'] = 
JENKINS_SESSION.get( data_url, headers=headers, auth=( username, password)).json() + + def get_jira_data(self, JIRA, SEND, type, username, password): + JIRA_SESSION = requests.Session() + JIRA_SESSION.trust_env = False + + url = os.getenv(type + '_URL') + print '[INFO] JIRA QUERY : ', url, '\n' + headers = { 'content-type' : 'application/json', 'Accept-Charset' : 'UTF-8' } + JIRA['raw'] = JIRA_SESSION.get( url, headers=headers, auth=( username, password)).json() + print '[INFO] get JIRA issues (', len(JIRA['raw']['issues']), ')\n' + + maxlength = 100 + for idx, value in enumerate( JIRA['raw']['issues'] ): + + if idx >= maxlength: + break + + print 'Start : ' , value['key'] + + assign = ' ' + + try: + assign = value['fields']['assignee']['displayName'] + except : + assign = 'Unassigned' + + if 'NJTS' in value['key']: + project = 'NJTS' + elif 'DEVT' in value['key']: + project = 'DEVT' + else: + project = 'NULL' + + JIRA['item'].append( + { + 'jira_key' : value['key'], + 'project' : project, + 'title' : value['fields']['summary'], + 'assignee' : assign, + 'created' : dateutil.parser.parse( value['fields']['created'] ).strftime('%Y-%m-%d %H:%M:%S'), + 'priority' : value['fields']['priority']['name'], + 'status' : value['fields']['status']['name'], + }) + + if len( JIRA['item'] ) > 0: + SEND['tool_jira_issues'] = JIRA['item'] + + + def parse_prereviewtest_data(self, JENKINS, SEND): + for idx, value in enumerate( JENKINS['raw']['actions'][5]['parameters'] ): + + if 'GERRIT_HOST' in value['name']: + if '10.113.136.109' in value['value']: + gerrit = 'SPIN' + else: + gerrit = 'PUBLIC' + + if 'GERRIT_EVENT_TYPE' in value['name']: + commit_event_type = value['value'] + + if 'GERRIT_EVENT_ACCOUNT_NAME' in value['name']: + committer = value['value'] + + if 'GERRIT_PROJECT' in value['name']: + commit_project = value['value'] + + if 'GERRIT_REFNAME' in value['name']: + commit_branch = value['value'] + elif 'GERRIT_BRANCH' in value['name']: + commit_branch = value['value'] + + if 'GERRIT_NEWREV' in value['name']: + commit_id = value['value'] + elif 'GERRIT_PATCHSET_REVISION' in value['name']: + commit_id = value['value'] + + JENKINS['item'].append( + { + 'test_worker_job': JENKINS['raw']['fullDisplayName'], + 'test_type': 'prereview test', + 'test_status': JENKINS['raw']['result'], + 'test_date': JENKINS['raw']['id'], + #'test_date': dateutil.parser.parse( JENKINS['raw']['id'] ).strftime('%Y-%m-%d %H:%M:%S'), + 'commit_event_type': commit_event_type, + 'commit_branch': commit_branch, + 'commit_project': commit_project, + 'committer': committer, + 'commit_id': commit_id, + 'gerrit': gerrit, + }) + + if len( JENKINS['item'] ) > 0: + SEND['tool_prereviewtest_status'] = JENKINS['item'] + print ( SEND ) + + def parse_functiontest_data(self, JENKINS, SEND): + commit_event_type = 'NULL' + commit_branch = 'NULL' + commit_project = 'NULL' + committer = 'NULL' + for idx, value in enumerate( JENKINS['raw']['actions'] ): + if 'parameters' in value: + for i, v in enumerate( value['parameters'] ): + if 'TRIGGER_INFO' in v['name']: + #Daily Build case + if v['value'] == 'NULL': + commit_id = JENKINS['raw']['id'][0:10] + else: + # Auto trigger + commit_id = v['value'] + + '''if 'TRIGGER_INFO_event_type' in v['name']: + commit_event_type = v['value'] + if 'TRIGGER_INFO_branch' in v['name']: + commit_branch = v['value'] + if 'TRIGGER_INFO_project' in v['name']: + commit_project = v['value'] + if 'TRIGGER_INFO_committer' in v['name']: + committer = v['value']''' + + if '10.113.136.111' in JENKINS['raw']['url']: + gerrit = 
'SPIN' + else: + gerrit = 'PUBLIC' + if 'function-test' in JENKINS['raw']['url']: + test_type = 'function test' + if 'full-test' in JENKINS['raw']['url']: + test_type = 'full test' + + index = JENKINS['raw']['fullDisplayName'].index('#') + print (index) + + JENKINS['item'].append( + { + 'test_worker_job': JENKINS['raw']['fullDisplayName'], + 'test_type': test_type, + 'test_status': JENKINS['raw']['result'], + #'test_date': dateutil.parser.parse( JENKINS['raw']['id'] ).strftime('%Y-%m-%d %H:%M:%S'), + 'test_date': JENKINS['raw']['id'], + 'commit_event_type': commit_event_type, + 'commit_branch': commit_branch, + 'commit_project': commit_project, + 'committer': committer, + 'commit_id': commit_id, + 'gerrit': gerrit, + }) + if len( JENKINS['item'] ) > 0: + SEND['tool_prereviewtest_status'] = JENKINS['item'] + print ( SEND ) + + + + def send_data(self, CONFIG, SEND): + with open('jiradata.txt', 'w') as outfile: + json.dump(SEND, outfile) + + CONFIG['DATA_JENKINS']['params'] = {"parameter" : [ {"name": "jiradata.txt", "file": "file0"},]} + + with open("jiradata.txt", "rb") as f: + file_data = f.read() + + data, content_type = urllib3.encode_multipart_formdata([ + ("file0", (f.name, file_data)), + ("json", json.dumps(CONFIG['DATA_JENKINS']['params'])), + ("Submit", "Build"), + ]) + if 'token' in CONFIG['DATA_JENKINS'].keys(): + #url = '%s/job/%s/buildWithParameters?token=%s' % (CONFIG['DATA_JENKINS']['url'], CONFIG['DATA_JENKINS']['job'], CONFIG['DATA_JENKINS']['token']) + url = '%s/job/%s/buildWithParameters?token=%s' % (os.getenv("DATA_JENKINS_URL"), CONFIG['DATA_JENKINS']['job'], CONFIG['DATA_JENKINS']['token']) + result = requests.post(url, data=data, headers={"content-type": content_type}, verify=False) + else: + #url = '%s/job/%s/buildWithParameters' % (CONFIG['DATA_JENKINS']['url'], CONFIG['DATA_JENKINS']['job']) + url = '%s/job/%s/buildWithParameters' % ( os.getenv("DATA_JENKINS_URL"), CONFIG['DATA_JENKINS']['job']) + result = requests.post(url, auth=( os.getenv("DATA_JENKINS_USER"), os.getenv("DATA_JENKINS_PW")), data=data, headers={"content-type": content_type}, verify=False) + + + +if __name__ == "__main__": + + CONFIG = { + 'file_jenkins' : 'config.jenkins', + 'DATA_JENKINS' : {} + } + + sender = SendData() + + # load data jenkins config file + sender.load_file(CONFIG, 'file_jenkins', 'DATA_JENKINS') + + # get job name and build name + job_name = os.getenv("jobName") + build_number = os.getenv("buildNumber") + print "job_name = %s, buildNumber = %s\n" %(job_name, build_number) + + if job_name is not None and build_number is not None: + JENKINS = {} + SEND = {} + sender.get_data(JENKINS, job_name, build_number, 'SPIN_JENKINS', os.getenv("SPIN_JENKINS_USER"), os.getenv("SPIN_JENKINS_PW")) + + if 'function-test' in job_name or 'full-test' in job_name: + # function test data in spin jenkins + sender.parse_functiontest_data(JENKINS, SEND) + else: + # prereview test data in spin jenkins + sender.parse_prereviewtest_data(JENKINS, SEND) + + else: + JIRA = {} + JIRA['item'] = [] + SEND = {} + + # get data in spin jira + sender.get_jira_data(JIRA, SEND, 'SPIN_JIRA', os.getenv("SPIN_JIRA_USER"), os.getenv("SPIN_JIRA_PW")) + + # get data in spin jira + #sender.get_jira_data(JIRA, SEND, 'PUBLIC_JIRA', os.getenv("PUBLIC_JIRA_USER"), os.getenv("PUBLIC_JIRA_PW")) + + sender.send_data(CONFIG, SEND) + + + -- 2.7.4
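
Note on the data flow these scripts implement: job_tool_sender.py is driven entirely by environment variables (jobName, buildNumber, SPIN_JENKINS_URL/_USER/_PW or SPIN_JIRA_URL/_USER/_PW, and DATA_JENKINS_URL/_USER/_PW), serializes the collected records into jiradata.txt, and triggers the receiver job named in pmb/config.jenkins (Tool_Prereview_Test_Receiver) with that file attached as the "file0" parameter; job_tool_jira_receiver.py then reads file0 on the receiver side. The following is a minimal sketch of that hand-off, reusing the sample record from pmb/jiradata.txt and requests' built-in multipart support in place of urllib3.encode_multipart_formdata; the URL environment variable value and credentials are placeholders, not values from this patch.

# Sketch only: post one tool_prereviewtest_status record to the receiver job,
# mirroring job_tool_sender.py's send_data(). Assumes DATA_JENKINS_URL is set.
import json
import os

import requests

payload = {
    "tool_prereviewtest_status": [{
        "commit_id": "2018-05-20",
        "committer": "NULL",
        "commit_project": "NULL",
        "commit_event_type": "NULL",
        "test_date": "2018-05-20_01-40-00",
        "test_status": "SUCCESS",
        "test_worker_job": "Tools-gbs-full-test #432",
        "test_type": "full test",
        "gerrit": "SPIN",
        "commit_branch": "NULL",
    }]
}

# Serialize the records the same way the sender does.
with open('jiradata.txt', 'w') as outfile:
    json.dump(payload, outfile)

# config.jenkins supplies the receiver job name and its trigger token.
with open('config.jenkins') as f:
    cfg = json.load(f)   # {"job": "Tool_Prereview_Test_Receiver", "token": "..."}

params = {'parameter': [{'name': 'jiradata.txt', 'file': 'file0'}]}
url = '%s/job/%s/buildWithParameters?token=%s' % (
    os.getenv('DATA_JENKINS_URL'), cfg['job'], cfg['token'])

# Multipart POST: the file goes up as "file0", the "json" field maps it to
# the jiradata.txt file parameter of the receiver job.
with open('jiradata.txt', 'rb') as f:
    resp = requests.post(url,
                         files={'file0': ('jiradata.txt', f.read())},
                         data={'json': json.dumps(params), 'Submit': 'Build'},
                         verify=False)
print resp.status_code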
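
On the receiving side, job_tool_jira_receiver.py upserts each record into tool_jira_issues or tool_prereviewtest_status through common.buildmonitor_db (do_query / get_value_from_query_data). The UPDATE branch in prereviewtest_update() appears to interpolate commit_id into the SET clause while binding test_status to the WHERE clause; the sketch below shows the intended select-then-insert-or-update ordering using plain MySQLdb. This is illustrative only and does not use the buildmonitor_db helpers; the connection parameters are placeholders, and the column set is the one implied by the patch.

# Sketch of the upsert pattern for tool_prereviewtest_status, assuming a
# reachable MySQL instance; host/user/passwd/db are placeholders.
import MySQLdb

def upsert_test_status(db, item):
    cur = db.cursor()
    # Look for an existing row for this commit and test type.
    cur.execute("SELECT id FROM tool_prereviewtest_status "
                "WHERE commit_id = %s AND test_type = %s",
                (item['commit_id'], item['test_type']))
    if cur.fetchone():
        # Update only the test result of the existing record.
        cur.execute("UPDATE tool_prereviewtest_status SET test_status = %s "
                    "WHERE commit_id = %s AND test_type = %s",
                    (item['test_status'], item['commit_id'], item['test_type']))
    else:
        # First result for this commit/test type: insert the full record.
        cur.execute("INSERT INTO tool_prereviewtest_status "
                    "(commit_id, committer, commit_event_type, test_type, gerrit, "
                    "commit_project, test_date, test_status, test_worker_job, commit_branch) "
                    "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
                    (item['commit_id'], item['committer'], item['commit_event_type'],
                     item['test_type'], item['gerrit'], item['commit_project'],
                     item['test_date'], item['test_status'], item['test_worker_job'],
                     item['commit_branch']))
    db.commit()

# Placeholder connection details for illustration only.
db = MySQLdb.connect(host='localhost', user='jenkins', passwd='secret', db='buildmonitor')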