MERGE - TRBS 92/132292/3
author hyokeun <hyokeun.jeon@samsung.com>
Fri, 2 Jun 2017 06:21:55 +0000 (15:21 +0900)
committer hyokeun <hyokeun.jeon@samsung.com>
Fri, 2 Jun 2017 07:50:20 +0000 (16:50 +0900)
Change-Id: Ib6e905134109af0e9a1aced4a32c8ef42342f00c

42 files changed:
abs/job_abs_batch_all.py [new file with mode: 0755]
abs/job_abs_main.py [new file with mode: 0755]
abs/job_abs_update_vm.py [new file with mode: 0755]
abs/report_template [new file with mode: 0644]
common/buildservice.py
common/buildtrigger.py
common/git.py
common/snapshot.py
common/trbs.py [new file with mode: 0755]
common/upload_service.py
common/utils.py
debian/control
debian/jenkins-scripts-abs.install [new file with mode: 0644]
debian/jenkins-scripts-common.install
debian/jenkins-scripts-dependsgraph.install [new file with mode: 0644]
debian/jenkins-scripts-groovyinit.install [moved from debian/jenkins-scripts-init.install with 100% similarity]
debian/jenkins-scripts-trbs.install [new file with mode: 0644]
debian/jenkins-scripts.install
debian/rules
job_buildlogs.py
job_buildmonitor.py
job_create_sdkrootstrap.py [new file with mode: 0755]
job_create_snapshot.py
job_find_incorrect_filenames.py [new file with mode: 0755]
job_imager.py
job_importrpm_obs.py [new file with mode: 0644]
job_jobs_dispatcher.py
job_load_repos.yaml.py
job_pre_release_obs.py
job_rsync_download.py [new file with mode: 0755]
job_sync_repo.py [new file with mode: 0755]
job_trigger_for_sync_repo.py [new file with mode: 0755]
job_trigger_obs_sync.py [new file with mode: 0755]
job_update_public_git.py [new file with mode: 0755]
packaging/jenkins-scripts.spec
trbs/job_trbs_autoremove.py [new file with mode: 0755]
trbs/job_trbs_obs.py [new file with mode: 0755]
trbs/job_trbs_post_image.py [new file with mode: 0755]
trbs/job_trbs_submit.py [new file with mode: 0755]
trbs/job_trbs_sync_aws.py [new file with mode: 0755]
trbs/job_trbs_test_automation.py [new file with mode: 0755]
trbs/job_trbs_update_git.py [new file with mode: 0755]

diff --git a/abs/job_abs_batch_all.py b/abs/job_abs_batch_all.py
new file mode 100755 (executable)
index 0000000..830f81b
--- /dev/null
@@ -0,0 +1,800 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+
+import os
+import re
+import sys
+import subprocess
+import urllib, urllib2
+import requests
+from bs4 import BeautifulSoup
+from urllib2 import urlopen, ProxyHandler, build_opener, install_opener, URLError, HTTPError
+import time
+from datetime import datetime, timedelta
+import json
+import base64
+import ast
+import xml.etree.cElementTree as ET
+import inspect
+from random import randint
+
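+# Make the parent directory importable so the common.* helpers resolve.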
+sys.path.insert(1, os.path.join(sys.path[0], '..'))
+
+from common.buildtrigger import trigger_info, trigger_next, get_jenkins_build_data
+from common.mapping import git_obs_map_full_list
+from common.utils import list_files_in_url, unicode_to_str
+from common.git import Git, clone_gitproject
+from common.send_mail import prepare_mail, makemail
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+mail_title_prefix = '[ABS]'
+
+EMAIL_HEADER = 'App Build System(ABS) reports the entire project build.\n' \
+               'It is intended for build checks with a new private rootstrap.\n' \
+               '(Base source code from the most recent accepted SR)\n\n'
+EMAIL_FOOTER = '\n\n--------------------------------------------------------\n'\
+               'Automatically generated by backend service.\n'\
+               'Please DO NOT Reply!'
+
+def grap_text_from_url(url):
+
+    html = urlopen(url).read()
+    soup = BeautifulSoup(html, 'html.parser')
+
+    # rip all script and style elements out
+    for script in soup(["script", "style"]):
+        script.extract()
+
+    text = soup.get_text()
+    lines = (line.strip() for line in text.splitlines())
+    chunks = (phrase.strip() for line in lines for phrase in line.split("  "))
+    text = '\n'.join(chunk for chunk in chunks if chunk)
+
+    return text
+
+def send_mail(title, msg_text):
+
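+    # A title containing ': 0 error' means a clean full build; skip the mail.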
+    if ': 0 error' in title:
+        return
+
+    title = "%s %s " % (mail_title_prefix, title)
+
+    # Compose the plain-text message body.
+    msg = '\n' + EMAIL_HEADER + msg_text + '\n\n' + EMAIL_FOOTER
+    print '\n[TITLE]\n%s\n[MSG]\n%s' % (title, msg)
+
+    email_to = []
+    email_to.extend(os.getenv('ABS_MAILINGLIST').split(','))
+
+    prepare_mail("%s.env" % os.getenv('BUILD_TAG'), \
+                 title, \
+                 msg, \
+                 os.getenv('NOREPLY_EMAIL_SENDER'), \
+                 email_to)
+
+class Git_gerrit_if(object):
+
+    def __init__(self, \
+                 gitcache=os.getenv('ABS_GIT_CACHE_DIR'), \
+                 hostname=os.getenv('ABS_GERRIT_HOSTNAME'), \
+                 username=os.getenv('ABS_GERRIT_USERNAME'), \
+                 sshport=os.getenv('ABS_GERRIT_SSHPORT')):
+        self.hostname = hostname
+        self.username = username
+        self.sshport = sshport
+        self.gitcache = gitcache
+        self.remote = 'ssh://%s@%s:%d/' \
+                      % (self.username, \
+                         self.hostname, \
+                         int(self.sshport))
+
+class Trigger_for_abs_update(object):
+
+    profiles = os.getenv('ROOTSTRAP_PROFILES').split(',')
+    trigger_arg = ''
+
+    kvm_root = '/'.join(os.getenv('ABS_VM_IMAGE').split('/')[:-1])
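+    # pattern matches version strings such as 'tizen-mobile_20160405.3'.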
+    pattern = r'tizen[a-zA-Z_-]*[0-9]{8}.[0-9]{1,2}'
+
+    def __init__(self, new_rs=None, silent=False):
+
+        self.silent = silent
+        self.new_rootstrap_url = self.trigger_arg = new_rs
+        if new_rs is not None:
+            if new_rs.startswith('http'):
+                # Extract the version token (e.g. 'tizen-mobile_20160405.3') from the url.
+                self.trigger_arg = re.match(r'.*\/(.*-.*_[0-9.]{1,})\/.*', new_rs).group(1)
+            # Narrow the profile list down to the one matching the rootstrap name.
+            for x in os.getenv('ROOTSTRAP_PROFILES').split(','):
+                expr = r'(%s)_(\d{8}).([0-9]+)' % (x.lower().replace(':', '-'))
+                if re.search(expr, new_rs):
+                    self.profiles = [x]
+                    break
+        if len(self.profiles) > 1:
+            raise Exception('Sorry. Your selection is not among the known profiles: %s' % self.profiles)
+        print '  Version:%s  Profile:%s' \
+              % (self.trigger_arg, self.profiles)
+
+    def get_rootstrap_url(self, profile, version):
+        return self.new_rootstrap_url
+
+    def run_update_trigger(self, profile):
+
+        if self.trigger_arg != '':
+            rs_version = self.trigger_arg
+            print 'Rootstrap from pre-defined variable: %s' % rs_version
+        else:
+            raise Exception('Sorry. No rootstrap url argument given!')
+
+        print('-----[trigger update_vm job for {} ]-----'.format(rs_version))
+        trigger_data = {"contents":
+                           {"version": rs_version,
+                            "project": profile,
+                            "rs_url": self.get_rootstrap_url(profile, rs_version)},
+                        "title": "sdk_rootstrap_updated" }
+        trigger_next("update_vm_%s" % (rs_version), trigger_data)
+        if not self.silent:
+            print "\"TitleBatch\": \"update_vm(%s)\"" % (rs_version)
+
+    def main(self):
+
+        print('-----[JOB STARTED: Trigger_for_abs_update]-----')
+        for profile in self.profiles:
+            self.run_update_trigger(profile)
+
+class Initiate_build_all(object):
+
+    package_commit_list = {}
+    project = '' # Tizen:Mobile, Tizen:Wearable
+    version = '' # tizen-mobile_20160405.3, tizen-wearable_20160405.3
+    b_prerelease = False
+
+    def __init__(self):
+
+        fields = trigger_info(os.getenv('TRIGGER_INFO'))
+        self.project = fields['project']
+        if 'version' in fields:
+            self.version = fields['version']
+        else:
+            self.b_prerelease = True
+            self.commit_message = fields['commit_message']
+            self.tag_name = fields['tag_name']
+        self.staging = 'abs'
+        print '\nBuild all for %s - %s\n' % (self.project, self.version)
+
+        self.workspace = os.getenv('WORKSPACE')
+        self.gerrit_if = Git_gerrit_if()
+
+    def get_mapping_list(self):
+
+        print '****'
+        print self.project
+        print self.staging
+        print self.gerrit_if.gitcache
+        print self.gerrit_if.hostname
+        print self.gerrit_if.username
+        print self.gerrit_if.sshport
+        mapping_list = git_obs_map_full_list(obs_project=self.project, staging_project=self.staging, \
+                                             gitcache=self.gerrit_if.gitcache, \
+                                             gerrit_hostname=self.gerrit_if.hostname, \
+                                             gerrit_username=self.gerrit_if.username, \
+                                             gerrit_sshport=self.gerrit_if.sshport)
+        print mapping_list
+
+        for item in mapping_list:
+            prj_name = item['Project_name']
+            _branch = item['Branch_name']
+            _obs_prj = item['OBS_project']
+            _stg = item['OBS_staging_project']
+            _pkg = item['OBS_package']
+            _brch = ''
+            for _br in _branch.split(','):
+                if _brch == '' or len(_br) < len(_brch):
+                    _brch = _br
+            single_list = {'git_path': prj_name, \
+                           'branch_name': _brch, \
+                           'obs_project': _obs_prj, \
+                           'staging_project': _stg, \
+                           'package': _pkg}
+            self.package_commit_list[_pkg] = single_list
+            continue
+
+    def set_commit_message(self, git_path, msg):
+
+        for x in self.package_commit_list:
+            single_list = self.package_commit_list.get(x)
+            if single_list['git_path'] == git_path:
+                single_list['commit_message'] = msg
+                self.package_commit_list[x] = single_list
+                return
+
+    def set_commit_ids(self, full_commit_list):
+
+        print '\nLIST FOR BUILD WITH COMMIT ID:'
+        for x in self.package_commit_list:
+            single_list = self.package_commit_list.get(x)
+            single_list['commitid'] = full_commit_list.get(single_list['git_path'])
+            self.package_commit_list[x] = single_list
+            print self.package_commit_list.get(x)
+        print '\n'
+
+    def list_todo_packages(self):
+
+        self.get_mapping_list()
+        full_commit_list = {}
+        #ABS will find accepted SR tags. No need to parse commit IDs here!
+        self.set_commit_ids(full_commit_list)
+
+    def retrieve_last_accepted_tag(self, mygit=None, gitpath=None, profile=None, obs_project=None):
+        # full build will use the most recent accepted tag.
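+        # Ask git for the newest tag under refs/tags/accepted/<obs project path>/
+        # (tagger-date order), printing its refname plus the tagged commit's
+        # subject and body.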
+        t_outs, t_err, t_code = mygit._git_inout('for-each-ref', \
+                                     ['--sort=-taggerdate', '--format=%(refname)%0a%(*subject)%0a%(*body)', \
+                                      'refs/tags/accepted/%s/' % ('/'.join(obs_project.split(':')).lower()), '--count=1'])
+        if len(t_outs) == 0 or '/%s/' % profile not in t_outs.split('\n')[0]:
+            print 'Sorry. Most recent accepted tag %s is not the desired one.' % t_outs.split('\n')[0]
+            return None
+        accepted_tag = t_outs.split('\n')[0].replace('refs/tags/','').rstrip()
+        orig_tag = t_outs[t_outs.rfind('Reference: ')+11:].split('\n')[0].split(' ')[0].strip().rstrip()
+        if orig_tag.startswith('submit/tizen'):
+            print 'FULL BUILD! Accepted tag from _tpk branch %s -> %s' % (accepted_tag, orig_tag)
+            return orig_tag
+
+        t_outs, t_err, t_code = mygit._git_inout('show', \
+                                     [accepted_tag])
+        if len(t_outs) == 0 or '- Git project: %s' % gitpath not in t_outs:
+            print 'Sorry(1). Failed to retrieve the original tag from %s' % accepted_tag
+            return None
+        orig_tag = t_outs[t_outs.rfind('- Tag:')+6:].split('\n')[0].strip().rstrip()
+        if orig_tag.startswith('submit/tizen'):
+            print 'FULL BUILD! Accepted tag from source branch %s -> %s' % (accepted_tag, orig_tag)
+            return orig_tag
+
+        print 'Sorry(2). Failed to retrieve the original tag from %s' % accepted_tag
+        return None
+
+    def initiate_submit_request(self):
+
+        print 'initiate_submit_request'
+        tag_name = re.search(r'(submit/tizen.*/[0-9]+.[0-9]+)', self.tag_name)
+        if tag_name is not None:
+            self.tag_name = tag_name.groups()[0]
+        else:
+            date_str = str(datetime.now())
+            tag_name_t = 'submit/' + self.project.replace(':', '_').lower() + '/' \
+                 + date_str.split(' ')[0].replace('-', '') \
+                 + '.' + date_str.split(' ')[1].split('.')[0].replace(':', '')
+            self.tag_name = tag_name_t
+        print '\n==============\nTag to push : %s\n==============' % self.tag_name
+
+        # Fetch first
+        for x in self.package_commit_list:
+            pkg_info = self.package_commit_list.get(x)
+            gerrit_project = pkg_info['git_path']
+            print '\n\n********* %s ********' % os.path.basename(gerrit_project).upper()
+            prjdir = os.path.join(self.workspace, os.path.basename(gerrit_project))
+            if not clone_gitproject(gerrit_project, prjdir, \
+                                    git_cache_dir=self.gerrit_if.gitcache, \
+                                    gerrit_hostname = self.gerrit_if.hostname, \
+                                    gerrit_username = self.gerrit_if.username, \
+                                    gerrit_sshport = self.gerrit_if.sshport):
+                raise LocalError('Error cloning project %s' % (gerrit_project))
+            mygit = Git(prjdir)
+            #TODO: Find accepted tag.
+            sr_tag = self.retrieve_last_accepted_tag(mygit, gerrit_project, \
+                                                     pkg_info['obs_project'].split(':')[-1].lower(), \
+                                                     pkg_info['obs_project'])
+            pkg_info['commitid'] = sr_tag
+            mygit.checkout(pkg_info['commitid'])
+            t_outs, t_err, t_code = mygit._git_inout('log', ['--format=%B', '-n 1'])
+            #commit_message = t_outs[:t_outs.rfind("\nChange-Id: ")]
+            #commit_message = '[ABS] %s\nOriginal Commit:%s' % (commit_message, pkg_info['commitid'])
+            self.set_commit_message(gerrit_project, self.commit_message)
+            sys.stdout.flush()
+
+        print '\n\n%s' % self.package_commit_list
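+        # NOTE: tag pushing below is intentionally left disabled for now.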
+        return
+
+        # Push tag
+        for x in self.package_commit_list:
+            gerrit_project = self.package_commit_list.get(x)['git_path']
+            print '\nPushing tag for %s started at %s' % (gerrit_project, str(datetime.now()))
+            prjdir = os.path.join(self.workspace, os.path.basename(gerrit_project))
+            mygit = Git(prjdir)
+            mygit.create_tag(self.tag_name, \
+                             msg=self.package_commit_list.get(x)['commit_message'], 
+                             commit=self.package_commit_list.get(x)['commitid'])
+            #mygit.push_tag(self.gerrit_if.remote + gerrit_project, sr_tag)
+            print 'Pushing tag for %s finished at %s' % (gerrit_project, str(datetime.now()))
+
+        print "\"TitleBatch\": \"full_sr(%s)\"" % (self.version)
+
+    def build_all_together(self):
+
+        print 'build_all_together()'
+
+        index = 1
+        for x in self.package_commit_list:
+            single_dict = self.package_commit_list.get(x)
+            trigger_data = {
+#                            "rs_url": None,
+                            "obs_project": single_dict['obs_project'],
+                            "obs_package": single_dict['package'],
+                            "source":
+                                {"package": single_dict['git_path'].split('/')[-1],
+                                 "branch": single_dict['branch_name'],
+                                 "git_path": single_dict['git_path'],
+                                 "tag": single_dict['commitid']},
+                            "full_build": self.version,
+                            "event": {
+#                                "GERRIT_REFNAME": None, 
+                                "GERRIT_EVENT_ACCOUNT_EMAIL": "abs.robot@tizen.do.not.reply", 
+#                                "GERRIT_CHANGE_NUMBER": None, 
+                                "GERRIT_EVENT_ACCOUNT_NAME": "abs-robot", 
+#                                "GERRIT_REFSPEC": None, 
+                                "GERRIT_PROJECT": single_dict['git_path'], 
+#                                "GERRIT_PATCHSET_REVISION": None, 
+#                                "GERRIT_BRANCH": None, 
+                                "GERRIT_NEWREV": single_dict['commitid']},
+                            "index": str(index)}
+
+            if trigger_data['source']['tag'] is None:
+                print 'Cannot find mapping for %s - %s' % (trigger_data['source']['git_path'], trigger_data['obs_project'])
+            print 'ABS build for %s initiated.' % (trigger_data['source']['package'])
+            trigger_next('build_class_all_together_%d_%d_%s' \
+                         % (int(os.getenv('BUILD_NUMBER')), index, trigger_data['source']['package']), \
+                         trigger_data)
+            index += 1
+
+        print "\"TitleBatch\": \"full_build(%s)\"" % (self.version)
+
+    def main(self):
+
+        print('-----[JOB STARTED: Initiate_build_all]-----')
+        self.list_todo_packages()
+        #TODO: Enable initiate_submit_request() for pre-release process
+        if self.b_prerelease:
+            self.initiate_submit_request()
+        else:
+            self.build_all_together()
+
+class Jenkins(object):
+
+    jenkinsUrl = ''
+    jobName = ''
+    cred = None
+
+    def __init__(self, jobName=None):
+
+        jenkinsUrl = os.getenv('JOB_URL').replace('/'+os.getenv('JOB_NAME'), '')
+        auth = {'username': os.getenv('ABS_JENKINS_USER'), 'password': os.getenv('ABS_JENKINS_PASS')}
+        self.jenkinsUrl = jenkinsUrl.replace('http://', 'http://%s:%s@' \
+                                              % (auth['username'], auth['password']))
+        self.cred = {'url': os.getenv('JENKINS_URL'), \
+                     'username': auth['username'], \
+                     'password': auth['password']}
+        if jobName != None:
+            self.jobName = jobName
+        else:
+            self.jobName = os.getenv('ABS_BUILDER_JOB_NAME')
+
+class GatherBuildResult(Jenkins):
+
+    buildJob = None
+    buildNumber = None
+    result = ''
+    sr_tag = 'sr_tag'
+
+    def __init__(self, buildJob=None, buildNumber=None, raw_data=None):
+
+        super(GatherBuildResult, self).__init__()
+
+        self.buildJob = buildJob
+        self.buildNumber = buildNumber
+        if self.buildNumber == None:
+            self.buildNumber = 'lastBuild'
+        if self.buildJob == None:
+            self.buildJob = self.jobName
+        self.get_build_data(raw_data)
+
+    def get_build_data(self, raw_data=None):
+
+        if raw_data is None:
+            build_data = get_jenkins_build_data(job=self.buildJob, \
+                                                build_num=self.buildNumber, cred=self.cred)
+        else:
+            build_data = raw_data
+
+        self.result = build_data['result']
+        self.duration = time.strftime('%Mmin %Ssec', time.gmtime(float(build_data['duration']) / 1000.0))
+        self.timestamp = time.strftime('%Y%m%d.%H%M%S', time.gmtime(float(build_data['timestamp']) / 1000.0))
+        self.number = build_data['number']
+        self.git_size_from = '0'
+        self.git_size_to = '0'
+        for action in build_data['actions']:
+            if 'parameters' in action:
+                for param in action['parameters']:
+                    if 'name' in param and 'value' in param and param['name'] == 'TRIGGER_INFO':
+                        myvalue = param['value']
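+                        # TRIGGER_INFO is a base64-encoded dict literal; replace
+                        # JSON null with the string "none" before literal_eval.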
+                        self.trigger_info = ast.literal_eval( \
+                            base64.b64decode(myvalue).replace('null', '\"none\"'))
+            if 'text' in action:
+                if 'submit' in action['text']:
+                    self.sr_tag = '/'.join(action['text'].split('/')[2:])
+                else:
+                    self.git_size_from = action['text'].split('/')[0]
+                    self.git_size_to = action['text'].split('/')[1]
+
+    def get_sr_tag(self):
+        return self.sr_tag
+
+    def get_build_number(self):
+        return str(self.number)
+
+    def get_build_result(self):
+        return self.result
+
+    def get_build_duration(self):
+        return self.duration.replace('00min ', '')
+
+    def get_build_timestamp(self):
+        return self.timestamp
+
+    def get_obs_project(self):
+        return self.trigger_info['obs_project']
+
+    def get_profile(self):
+        return self.get_obs_project().split(':')[-1].lower()
+
+    def get_branch_name(self):
+        return self.trigger_info['source']['branch']
+
+    def get_parameter_with(self, parm1, parm2=None):
+        if parm1 not in self.trigger_info or \
+            parm2 is not None and parm2 not in self.trigger_info[parm1]:
+            return '-'
+        if parm2 is None:
+            return self.trigger_info[parm1]
+        else:
+            return self.trigger_info[parm1][parm2]
+
+    def get_full_build(self):
+        return self.get_parameter_with('full_build')
+
+    def get_package_name(self):
+        return self.get_parameter_with('source', 'package')
+
+    def get_git_path(self):
+        return self.get_parameter_with('event', 'GERRIT_PROJECT')
+
+    def get_account_name(self):
+        return self.get_parameter_with('event', 'GERRIT_EVENT_ACCOUNT_NAME')
+
+    def get_account_email(self):
+        return self.get_parameter_with('event', 'GERRIT_EVENT_ACCOUNT_EMAIL')
+
+    def get_revision(self):
+        return self.get_parameter_with('event', 'GERRIT_NEWREV')
+
+    def get_patchset_revision(self):
+        return self.get_parameter_with('event', 'GERRIT_PATCHSET_REVISION')
+
+    def get_refname(self):
+        return self.get_parameter_with('event', 'GERRIT_REFNAME')
+
+    def get_refspec(self):
+        return self.get_parameter_with('event', 'GERRIT_REFSPEC')
+
+    def get_build_cause(self):
+        if self.get_refname() != '-' and self.get_refname().startswith('refs/tags/submit/'):
+            return 'submit'
+        elif self.get_refname() != '-' and self.get_refname().startswith('refs/tags/'):
+            return 'tag'
+        elif self.get_refspec() != '-' and self.get_refspec().startswith('refs/changes/'):
+            return 'review'
+        else:
+            return 'commit'
+
+    def get_reference_name(self):
+        build_cause = self.get_build_cause()
+        if build_cause == 'submit' or build_cause == 'tag':
+            return self.get_refname(), self.get_revision()
+        elif build_cause == 'review':
+            return self.get_refspec(), self.get_patchset_revision()
+        elif build_cause == 'commit':
+            return self.get_refname(), self.get_revision()
+        else:
+            return None, None
+
+    def get_git_size(self):
+        return self.git_size_from + ',' + self.git_size_to
+
+class ReportFullBuildResult(object):
+
+    list_builds = []
+    email_body = []
+    profile = '' #TODO: Not the entire value but one of them
+    full_build_cause = None
+
+    def main(self):
+
+        marker = {'SUCCESS': 'O', 'FAILURE': 'X', 'ABORTED': 'X'}
+
+        print '\nTotal number = %s' % (os.getenv('RESULT_PARSE'))
+        for i in range(0, int(os.getenv('RESULT_PARSE'))):
+            my_build_job = os.getenv('ABS_BUILD_NUMBER_%d' % (i)).split('#')[0].strip()
+            my_build_id = os.getenv('ABS_BUILD_NUMBER_%d' % (i)).split('#')[-1].rstrip()
+            x = GatherBuildResult(buildJob=my_build_job, buildNumber=my_build_id)
+            self.list_builds.append(x)
+            self.profile = x.get_profile()
+            self.branch_name = x.get_branch_name()
+            if self.full_build_cause is None:
+                self.full_build_cause = x.get_full_build()
+                print 'Full build cause: %s\n' % self.full_build_cause
+                self.email_body.append('Full build cause: %s\n' % self.full_build_cause)
+            if x.get_build_result() != 'SUCCESS':
+                self.email_body.append('%s  %s (%s)' \
+                                       % (marker[x.get_build_result()], x.get_package_name(), x.get_sr_tag()))
+            print '  %s >> %s' % (os.getenv('ABS_BUILD_NUMBER_%d' %(i)), 
+                                  self.email_body[-1].rstrip())
+
+        fail_count = 0
+        for x in self.list_builds:
+            if x.get_build_result() != 'SUCCESS':
+                fail_count += 1
+                path = os.path.join(x.jenkinsUrl, x.buildJob, x.buildNumber, 'consoleText')
+                log_title = '\n[[ %s ]]\n' \
+                            % x.get_package_name().upper()
+                print log_title
+                full_text = unicode_to_str(requests.get(path).text)
+
+                exp = r'/home/build/tizen-sdk-cli/tools/smart-build-interface/../../platforms/[a-zA-Z-0-9./]{1,}/rootstraps/[a-zA-Z]{1,}-[0-9.]{1,}-(emulator|device).core.private.[0-9_]{1,}'
+                full_text = re.sub(exp, "{PRIVATE_ROOTSTRAP} ", full_text)
+
+                # No accepted tags so far
+                idx = full_text.rfind('Full build cannot be proceed. No matching SR tag')
+                if idx != -1: 
+                    full_text = 'No accepted tpk tags for this package'
+                    fail_count -= 1
+
+                # No rootstrap found
+                idx = full_text.rfind("+ rs_list='")
+                if idx == -1:
+                    full_text = 'System admin need to check rootstrap generation failure.'
+
+                #If Exception string found
+                idx = full_text.rfind('Exception ')
+                idx2 = -1
+                if idx != -1:
+                    idx2 = full_text[idx:].find('\n')
+                if idx != -1 and idx2 != -1:
+                    full_text = full_text[idx:idx+idx2]
+                else:
+                    for _str in ['PLATFORM_VER\t', 'Finished build-native', 'Automatically generated by backend service.', \
+                                 'Build step \'Execute shell\' marked build as', 'compilation terminated.']:
+                        idx = full_text.rfind(_str)
+                        if idx != -1:
+                            full_text = full_text[:idx]
+
+                # Keep only the last 384 characters of the log for the mail body.
+                full_text = full_text[-384:]
+                print '%s' % full_text
+                self.email_body.append(log_title + '\n' + full_text)
+
+        send_mail(' FULL BUILD (%s) : %d error' % (self.full_build_cause, fail_count), '\n'.join(self.email_body))
+
+        #if fail_count != 0:
+        #    self.full_build_cause = self.full_build_cause + '-FAIL'
+        Trigger_for_abs_update(self.full_build_cause, silent=True).main()
+
+        print "\n\n\"TitleBatch\": \"Result\""
+
+class SummaryReport(Jenkins):
+
+    span_start = 0
+    span_end = 0
+
+    def __init__(self, span=None):
+
+        super(SummaryReport, self).__init__()
+        if span is None: span = os.getenv('REPORT_SPAN')
+        self._parse_date(span)
+        self.report_file = os.path.join('/'.join(os.getenv('ABS_VM_IMAGE').split('/')[:-1]), 'abs_history.report')
+        print "\n\n================\n==== BUILD HISTORY ====\n================\n"
+
+    def _parse_date(self, date=None):
+
+        if date is None:
+            date = '2D'
+
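+        # Accepted forms: 'all', '<n>D', '<n>W', '<n>M', '<n>Y',
+        # or an explicit 'YYYYMMDD-YYYYMMDD' range.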
+        today = time.strftime('%Y%m%d', time.gmtime())
+        print 'Today: %s' % today
+        print 'Now: %s' % str(datetime.now())
+
+        self.current_date = datetime.now()
+#        self.span_end = self.span_start = current_date.strftime('%Y%m%d')
+        span_end = span_start = self.current_date
+
+        if 'all' in date.lower():
+            span_end = span_start = None
+        elif re.search(r'([0-9]+)[D]', date):
+            delta = re.search(r'([0-9]+)[D]', date).groups()[0]
+            span_start = self.current_date - timedelta(days=int(delta)-1)
+        elif re.search(r'([0-9]+)[W]', date):
+            delta = re.search(r'([0-9]+)[W]', date).groups()[0]
+            span_start = self.current_date - timedelta(days=int(delta)*7-1)
+        elif re.search(r'([0-9]+)[M]', date):
+            delta = re.search(r'([0-9]+)[M]', date).groups()[0]
+            span_start = self.current_date - timedelta(days=int(delta)*30-1)
+        elif re.search(r'([0-9]+)[Y]', date):
+            delta = re.search(r'([0-9]+)[Y]', date).groups()[0]
+            span_start = self.current_date - timedelta(days=int(delta)*365-1)
+        elif re.search(r'(\d{8})-(\d{8})', date):
+            span_start, span_end = re.search(r'(\d{8})-(\d{8})', date).groups()
+            span_start = datetime.strptime(span_start, '%Y%m%d')
+            span_end = datetime.strptime(span_end, '%Y%m%d')
+
+        if span_end is None or span_start is None:
+            self.span_start = self.span_end = None
+        else:
+            self.span_start = span_start.strftime('%Y%m%d')
+            self.span_end = span_end.strftime('%Y%m%d')
+
+        print 'start:%s, end:%s' % (self.span_start, self.span_end)
+
+    def generate_log(self):
+
+        # No Date Result Git Name Email Tag/Branch Revision Elapsed
+        old_history = []
+        self.new_history = []
+
+        if os.path.isfile(self.report_file):
+            with open(self.report_file, 'r') as rf:
+                for x in rf.readlines():
+                    old_history.append(x.rstrip().split(','))
+
+        if len(old_history) > 0:
+            last_history = old_history[0][0]
+        else:
+            last_history = '0'
+
+        # No Date Result Git Name Email Tag/Branch Revision Elapsed
+
+        build_data = get_jenkins_build_data(job=self.jobName, \
+                                            build_num=None, cred=self.cred)
+        sys.stdout.flush()
+
+        for build in build_data:
+            number = build['number']
+            duration = build['duration']
+            timestamp = build['timestamp']
+            this_date = time.strftime('%Y%m%d', time.gmtime(float(timestamp) / 1000.0))
+
+            if (self.span_end == None and self.span_start == None) or \
+                (self.span_end >= this_date and self.span_start <= this_date):
+
+                if int(last_history) >= int(number):
+                    break
+
+                x = GatherBuildResult(buildJob=None, buildNumber=None, raw_data=build)
+                if x.get_build_result() is None:
+                    continue
+                if 'tizen' in x.get_full_build():
+                    continue
+                if 'jenkins' in x.get_account_name().lower():
+                    continue
+
+                myhistory = []
+                myhistory.append(x.get_build_number())
+                myhistory.append(x.get_build_timestamp())
+                myhistory.append(x.get_build_result())
+                myhistory.append(x.get_git_path())
+                myhistory.append(x.get_account_name())
+                myhistory.append(x.get_account_email())
+                myhistory.append(x.get_reference_name()[0])
+                myhistory.append(x.get_reference_name()[1])
+                myhistory.append(x.get_build_duration())
+                myhistory.append(x.get_git_size())
+                self.new_history.append(myhistory)
+                print '**********************************'
+                print myhistory
+
+        with open(self.report_file, 'w') as newf:
+            for x in old_history:
+                self.new_history.append(x)
+            for x in self.new_history:
+                newf.write(','.join(x) + '\n')
+
+    def report_log(self):
+
+        self._history = []
+
+        if os.path.isfile(self.report_file):
+            with open(self.report_file, 'r') as rf:
+                for x in rf.readlines():
+                    self._history.append(x.rstrip().split(','))
+
+        for build in self._history:
+        # No Date Result Git Name Email Tag/Branch Revision Elapsed
+            _no, _da, _rt, _gt, _nm, _ml, _tg, _rv, _tm, _sz1, _sz2 = build
+            this_date = _da.split('.')[0]
+            if (self.span_end == None and self.span_start == None) or \
+                (self.span_end >= this_date and self.span_start <= this_date):
+                print ' '
+                print '[%s] >> %s' % (_gt, _rt)
+                print '    %s(%s)' % (_nm, _ml)
+                print '    %s @ %s' % (_tg, _rv)
+                print '    %s #%s (%s)' % (_da, _no, _tm)
+
+    def generate_html_report(self):
+
+        with open(os.path.join(os.getenv('WORKSPACE'), 'jenkins-scripts', \
+            'abs', 'report_template'), 'r') as tf:
+            js_template = tf.read().replace('%', '%%').replace('RAW_DATA_FROM_BACKEND', '%s')
+
+        raw_data = []
+        with open(self.report_file, 'r') as rf:
+            for x in rf.readlines():
+                _no, _dt, _rs, _pk, _nm, _em, _tg, _rv, _ep, _si, _so = x.split(',')
+                raw_data.append(
+                           {'num': _no,
+                            'date': _dt,
+                            'result': _rs,
+                            'package': _pk,
+                            'name': _nm,
+                            'email': _em,
+                            'tag': _tg,
+                            'rev': _rv,
+                            'elapsed': _ep,
+                            'sizein': _si,
+                            'sizeout': _so})
+
+        html_path = os.path.join(os.getenv('WORKSPACE'), 'html')
+        html_file = 'index.html'
+        os.makedirs(html_path)
+
+        html_contents = js_template % (raw_data)
+
+        with open(os.path.join(html_path, html_file), "w") as w_file:
+            w_file.write(html_contents)
+
+    def main(self):
+
+        self.generate_log()
+        #self.report_log()
+        self.generate_html_report()
+        print "\"TitleBatch\": \"Report\""
+
+if __name__ == '__main__':
+
+    try:
+        if os.getenv('REPORT_SPAN'):
+            trigger = SummaryReport()
+            sys.exit(trigger.main())
+
+        # Full build result parsing
+        elif os.getenv('RESULT_PARSE'):
+            trigger = ReportFullBuildResult()
+            ret = trigger.main()
+            trigger = SummaryReport(span='All')
+            trigger.main()
+            sys.exit(ret)
+
+        elif os.getenv('TRIGGER_INFO'): # Full build
+            trigger = Initiate_build_all()
+            sys.exit(trigger.main())
+        elif os.getenv('NEW_ROOTSTRAP'): # ABS VM update
+            trigger = Trigger_for_abs_update(new_rs=os.getenv('NEW_ROOTSTRAP'))
+            sys.exit(trigger.main())
+        else: #Default
+            print 'INVALID OPTION...'
+            sys.exit(-1)
+
+    except Exception as e:
+        print(e)
+        sys.exit(1)
+
+
diff --git a/abs/job_abs_main.py b/abs/job_abs_main.py
new file mode 100755 (executable)
index 0000000..f352308
--- /dev/null
@@ -0,0 +1,1411 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+"""This job is triggered by gerrit submit ref update event for preloaded app.
+   It will generate tizen app package(.tpk) and requests prerelease routine.
+"""
+
+import os
+import sys
+import shutil
+import subprocess
+import re
+import glob
+import zipfile
+import tarfile
+import json
+
+from subprocess import Popen, PIPE
+from datetime import datetime
+import time
+from random import randint
+import xml.etree.cElementTree as ElementTree
+from xml.sax.saxutils import escape
+from time import sleep
+
+from gitbuildsys.errors import ObsError
+from gbp.git.repository import GitRepositoryError
+
+sys.path.insert(1, os.path.join(sys.path[0], '..'))
+
+from common.upload_service import upload_obs_service, UploadError, upload_obs_files
+
+from common import utils, mapping
+from common.utils import set_permissions, tail
+from common.mapping import git_obs_map
+
+from common.git import Git, clone_gitproject
+from common.gerrit import Gerrit, is_ref_deleted
+
+from common.buildservice import BuildService
+from common.prerelease import get_prerelease_project_name
+
+from common.buildtrigger import trigger_info, trigger_next
+from common.send_mail import prepare_mail
+from common import runner
+
+# set default charset encoding to utf-8
+reload(sys)
+sys.setdefaultencoding('utf-8') # pylint: disable-msg=E1101
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+class GerritEnv(object):
+
+    def __init__(self, event=None):
+
+        self.hostname = os.getenv('ABS_GERRIT_HOSTNAME')
+        self.username = os.getenv('ABS_GERRIT_USERNAME')
+        self.sshport  = os.getenv('ABS_GERRIT_SSHPORT')
+        self.gitcache = os.getenv('ABS_GIT_CACHE_DIR')
+
+class ObsBuildService(object):
+
+    def __init__(self): 
+        self.build = BuildService(os.getenv('ABS_OBS_API_URL'), \
+                                  os.getenv('ABS_OBS_API_USERNAME'), \
+                                  os.getenv('ABS_OBS_API_PASSWD'))
+        self.obs_url = os.getenv('ABS_OBS_URL')
+
+    def staging_project_name(self, prj_name, tag_name):
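+        # Delegates to common.prerelease to derive the prerelease (review)
+        # project name from the target project and the submit tag.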
+        return get_prerelease_project_name(prj_name, tag_name)
+
+#### SOURCE COPY FROM JOB_SUBMIT.PY ####
+def change_release_name(build, project, git_tag):
+    """
+    Change release name from project config in OBS
+    Add the datetime into release name.
+    Eg: 'Release: <CI_CNT>.<B_CNT>' ----> 'Release: 20141010.<CI_CNT>.<B_CNT>'
+    """
+    # get project config
+    config = build.get_project_config(project)
+    release_name = 'Release: %s' % (git_tag.split('/')[-1])
+    res = re.findall(r'^Release: ?\S+$', config, flags=re.MULTILINE)
+    if res:
+        if git_tag.split('/')[-1] not in res[0]:
+            note = '#Insert time from submission into release name\n'
+            release_name = '%s.%s' % (release_name,
+                                      res[0].split('Release:')[1].strip())
+            config = config.replace(res[0], '%s%s' % (note, release_name), 1)
+    else:
+        note = '#Add release name into prjconf\n'
+        config = note + '%s\n' % release_name + config
+    # set project config
+    build.set_project_config(project, config)
+
+def copy_person_project_meta(build, obs_target_prj, obs_project):
+    """copy the maintainer list from obs_target_prj meta to corresponding
+       prerelease project
+    """
+    src_meta_xml = build.get_meta(obs_target_prj)
+    src_xml_root = ElementTree.fromstringlist(src_meta_xml)
+    # get peron list from obs_target_prj meta
+    person_dict = {}
+    for person in src_xml_root.findall('person'):
+        if person.get('userid') in person_dict:
+            person_dict[person.get('userid')].append(person.get('role'))
+        else:
+            person_dict[person.get('userid')] = [person.get('role')]
+    # add person to prerelease project
+    if person_dict:
+        build.addPerson(obs_project, person_dict)
+
+def create_related_packages(build, obs_target_prj, obs_pre_prj, pre_package):
+    """create the 'link' package that relate the original package
+       obs_target_prj is the base project
+       obs_pre_prj is the prelease project
+       pre_package is the original package
+    """
+    sourceinfo = build.get_sourceinfo_list(obs_target_prj)
+    for package in sourceinfo:
+        if sourceinfo[package]:
+            link_prj, link_pkg = sourceinfo[package][-1].split('/')
+            if link_prj == obs_target_prj and link_pkg == pre_package:
+                build.create_link_pac(obs_pre_prj, pre_package, \
+                                       obs_pre_prj, package)
+        #TODO: _aggregate handling below is disabled for now; skip it but
+        #keep creating link packages for the remaining entries.
+        continue
+        if re.search("_aggregate", package):
+            print "Copypac aggregate package: %s/%s" %(obs_pre_prj, package)
+            build.create_copy_pac(obs_target_prj, package, obs_pre_prj,\
+                package)
+            aggregate_file_name="_aggregate"
+            build.get_source_file(obs_target_prj, package, aggregate_file_name)
+            content = ""
+            with open(aggregate_file_name, 'r') as f:
+                content = f.read()
+            content_xml_root = ElementTree.fromstringlist(content)
+            for element in content_xml_root.findall('aggregate'):
+                element.set('project',obs_pre_prj)
+            content = ElementTree.tostring(content_xml_root)
+            with open(aggregate_file_name, 'w') as f:
+                f.write(content)
+            commit_msg="uploaded to copy pac %s/%s from %s" % (obs_pre_prj, package, obs_target_prj)
+            try:
+                build.commit_files(obs_pre_prj, package,
+                       [(aggregate_file_name, True)], commit_msg)
+            except ObsError, error:
+                raise UploadError("Unable to upload _aggregate to %s: %s" % \
+                    (obs_pre_prj, error))
+            print "Copypac done."
+
+def create_project(git_url, git_project, git_tag, git_revision, build,
+                   obs_target_prj, obs_project, submitter, package, files=None):
+    """Create prerelease OBS project and upload sources for the build."""
+
+    # Create review project if it doesn't exist
+    print "Creating temporary review OBS project %s" % obs_project
+    info = {'projects': [git_project],
+            'obs_target_prj': obs_target_prj,
+            'git_tag': git_tag,
+            'git_commit': git_revision,
+            'obs_url': os.path.join(os.getenv('ABS_OBS_URL'), \
+                                    'project/show?project=%s' % obs_project),
+            'images': []}
+    if submitter:
+        info['submitter'] = escape(submitter)
+
+    if build.exists(obs_project):
+        # update project info
+        build.update_info(info, obs_project)
+        # unlink the project to upload packages
+        try:
+            build.unlink_project(obs_project)
+        except ObsError, error:
+            print 'Modify the meta conf to unlink failed: %s' % error
+    else:
+        if not build.exists(obs_target_prj):
+            raise LocalError("Target project %s doesn't exist" % obs_target_prj)
+        try:
+            build.create_project(obs_project, obs_target_prj,
+                                 description=json.dumps(info))
+        except ObsError, error:
+            LocalError("Unable to create project %s: %s" % (obs_project, error))
+
+    # change release name of project config in OBS
+    change_release_name(build, obs_project, git_tag) 
+
+    #disable publish flag
+    build.disable_build_flag(obs_project, repo = None, flag="publish", status="disable")
+
+    #disable build flag
+    build.disable_build_flag(obs_project, repo = None, flag="build", status="disable")
+
+    try:
+        if files is None:
+            upload_obs_service(git_url, git_project, git_tag,
+                               git_revision, obs_project, build, package)
+        else:
+            upload_obs_files(git_project, git_tag, git_revision, \
+                             obs_project, build, package, files)
+    except UploadError, err:
+        raise LocalError(err)
+
+    build.link_project(obs_project, src=obs_target_prj, linktype="localdep")
+
+    # copy the maintainer list from obs_target_prj meta to corresponding
+    # prerelease project
+    copy_person_project_meta(build, obs_target_prj, obs_project)
+
+    #create the 'link' package that relate the original package
+    create_related_packages(build, obs_target_prj, obs_project, package)
+
+    #Wait 10 seconds to upload the package to the OBS
+    sleep(10)
+
+    #default build flag
+    build.default_build_flag(obs_project, repo = None, flag="build")
+
+    #default publish flag
+    build.default_build_flag(obs_project, repo = None, flag="publish")
+
+def create_tarball(tpk_dir, src_dir):
+    os.makedirs(src_dir)
+    _tar_file = src_dir + '.tar.gz'
+    _tar = tarfile.open(_tar_file, 'w:gz')
+    for filename in os.listdir(tpk_dir):
+        if re.match('.*\.tpk', filename):
+            shutil.copy(os.path.join(tpk_dir, filename), os.path.join(src_dir, filename))
+    current_dir = os.getcwd()
+    os.chdir(os.path.dirname(src_dir))
+    _tar.add(os.path.basename(src_dir))
+    os.chdir(current_dir)
+    _tar.close()
+    return _tar_file
+
+class MailSender(object):
+
+    email_title = '[ABS]'
+    email_head  = 'App Build System(ABS) informs you of the SDK build result.\n' \
+                 '(ABS will trigger a build if you issue an SR on a preloaded app branch)\n\n'
+    email_body = ''
+    email_footer = '\n\n--------\n'\
+                   'Automatically generated by backend service.\n'
+
+    email_to = []
+
+    def __init__(self, receiver=None, title=None, body=None):
+        if receiver is not None: self.email_to = receiver
+        if title is not None: self.email_title = title
+        if body is not None: self.email_body = body
+
+    def add_receiver(self, add_new):
+        if type(add_new) == list:
+            self.email_to.extend(add_new)
+        elif type(add_new) == str: 
+            self.email_to.append(add_new)
+        else:
+            print 'TYPE(%s) == %s' % (add_new, type(add_new))
+
+    def add_title(self, add_new):
+        self.email_title = self.email_title + ' ' + add_new
+
+    def add_message(self, add_new, top=None):
+        if top is not None:
+            self.email_body = add_new + '\n' + self.email_body
+        else:
+            self.email_body = self.email_body + '\n' + add_new
+
+    def add_maintainers(self, project):
+        mygerrit = Gerrit(GerritEnv().hostname, GerritEnv().username,\
+                          port=int(GerritEnv().sshport))
+        grps = mygerrit.ls_groups(['--project %s' % project])
+        dest_grp = [s for s in grps if " - Maintainers" in s]
+        for dg in dest_grp:
+            mbrs = mygerrit.ls_members(['\'\"%s\"\'' % dg, '--recursive'])
+            for line in mbrs:
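+                # ls-members output is tab-separated; the fourth field is the email.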
+                self.add_receiver(line.split('\t')[3])
+
+    def send_mail(self):
+
+        if 'SUCCESS' not in self.email_title:
+            self.add_title('FAILURE')
+
+        self.email_body = self.email_head + self.email_body + self.email_footer
+
+        self.email_to = [x for x in self.email_to if x != 'n/a']
+        print '\n\n'
+        print self.email_title
+        m_body = ''
+        for m in self.email_body.splitlines():
+            m_body += '\n'.join(m[i:i+128] for i in xrange(0, len(m), 128)) + '\n'
+        self.email_body = m_body
+        print self.email_body
+        print self.email_to
+
+        prepare_mail("%s.env" % os.getenv('BUILD_TAG'), \
+                     self.email_title, \
+                     self.email_body, \
+                     os.getenv('NOREPLY_EMAIL_SENDER'), \
+                     self.email_to)
+
+        if 'SUCCESS' not in self.email_title:
+            return 4
+
+class VirtualMachine(object):
+
+    parallel_jobs_cfg = ''
+
+    def __init__(self, basedir, version_with=None, identifier=None):
+        self.virt_root = basedir
+        self.builddir = os.path.join(self.virt_root, 'build')
+        self.vm_image = os.getenv('ABS_VM_IMAGE')
+        if identifier is not None:
+            #TODO: HYOKEUN
+            if identifier == 'Tizen:Unified':
+                print 'Set tizen-unified'
+                identifier = 'tizen-unified'
+            else:
+                identifier = '-'.join(identifier.lower().split(':')[:-1])
+            self.vm_image = self.vm_image.replace('REL_VER', identifier)
+        if version_with is not None:
+            self.vm_image = self.vm_image + '.v-' + version_with
+            self.parallel_jobs_cfg = '-j 4'
+        self._generate_qemu_command()
+
+    def _generate_qemu_command(self):
+
+        ssh_connection = os.getenv('SSH_CONNECTION')
+        try:
+            lastip = int(ssh_connection.split(' ')[2].split('.')[3])
+        except:
+            lastip = randint(0x00, 0xff)
+            print 'Use random(%s) instead of ssh_connection(%s)' % (lastip, ssh_connection)
+        exeid = int(os.getenv('EXECUTOR_NUMBER', '1')) % 256
+        processid = os.getpid() % 256
+        buildid = int(os.getenv('BUILD_NUMBER', randint(0x00, 0xff))) % 256
+        mac_address = '52:%02x:%02x:%02x:%02x:%02x' % \
+                           (lastip, exeid, processid, buildid, randint(0x00, 0xff))
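+        # The MAC mixes ssh client ip, executor, pid and build number so that
+        # concurrent VMs on the same host get distinct addresses.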
+        self.cmd  = 'qemu-system-x86_64 -machine accel=kvm:xen:tcg ' \
+                    '-name ubuntu -M pc -m %d -smp %d ' \
+                    '-drive file=%s,snapshot=on '\
+                    '-virtfs local,id=test_dev,path=%s,security_model=mapped,mount_tag=share ' \
+                    '-net nic,macaddr=%s -net user -nographic' \
+                    % (int(os.getenv("ABS_VM_MEMORY")), int(os.getenv("ABS_VM_CPUS")), \
+                       self.vm_image, self.virt_root, mac_address)
+        print "Running kvm machine...\n %s" % (self.cmd)
+
+    def generate_build_cmd(self, package, profile=None, gitpath=None):
+
+        BUILD_ROOT = "/home/build"
+        SDK_PATH = os.path.join(BUILD_ROOT, "tizen-sdk-cli")
+        SHARE_ROOT = "/share/build"
+        PROFILE = profile
+
+        if gitpath:
+            print 'Force set profile from %s' % gitpath
+            if 'profile/mobile/apps/native/' in gitpath:
+                PROFILE = 'mobile'
+            elif 'profile/wearable/apps/native/' in gitpath:
+                PROFILE = 'wearable'
+            else:
+                raise LocalError('Not supported profile %s' % gitpath)
+
+        if os.getenv('BUILD_MODE') == 'Debug':
+            build_mode = 'Debug'
+        else:
+            build_mode = 'Release'
+
+        #TODO: 64 bit build support??
+        buildcmd = '#!/bin/bash \n' \
+            'set -x\n' \
+            'ABS_CMD=/home/build/abs\n' \
+            'SHARE_ROOT=%s\nBUILD_ROOT=%s\nSDK_PATH=%s\nPROFILE=%s\n' \
+            'PACKAGE=%s\n' \
+            'TMP_DIR=${BUILD_ROOT}/${PACKAGE}/_abs_out_\n' \
+            'SDK_CMD=$SDK_PATH/tools/ide/bin/tizen; LIST="$SDK_CMD list rootstrap "\n' \
+            'chown -R build:build $SHARE_ROOT\n' \
+            'su - build -c "cp $SHARE_ROOT/abs $BUILD_ROOT/"\n' \
+            'su - build -c "unzip $SHARE_ROOT/${PACKAGE}.zip -d $BUILD_ROOT/ > /dev/null"\n' \
+            'function _clear { \n' \
+            '  mv ${TMP_DIR}/*.log ${SHARE_ROOT}\n' \
+            '  exit $1 \n' \
+            '}\n' \
+            'UPDATER="$SDK_PATH/update-manager/update-manager-cli.bin"\n' \
+            'if [ ! -f $UPDATER ]; then\n' \
+            '  UPDATER="$SDK_PATH/package-manager/package-manager-cli.bin"\n' \
+            'fi\n' \
+            'export DISPLAY=:0\n' \
+            'su - build -c "$UPDATER show-info"\n' \
+            'su - build -c "tizen version"\n' \
+            'su - build -c "df -hT"\n' \
+            'rs_list=`su - build -c "tizen list rootstrap | grep ${PROFILE}-.*.core.private.* | cut -d \' \' -f 1"`\n' \
+            'echo $rs_list >> $SHARE_ROOT/rsver\n' \
+            'for rs in $rs_list; do\n' \
+            '  echo "BUILDING START TIME: `date`"\n\n' \
+            '  #if [[ $rs == *"64.core"* ]]; then\n' \
+            '  #  echo "SKIP! 64-bit not supported!"; continue;\n' \
+            '  #else\n' \
+            '    su - build -c "${ABS_CMD} build -r $rs -w ${BUILD_ROOT}/${PACKAGE} -s ABS -c %s %s"\n' \
+            '  #fi\n' \
+            '  mv ${TMP_DIR}/*.tpk ${SHARE_ROOT}\n' \
+            '  ret=$?; echo "BUILDING FINISH TIME: `date`"\n\n' \
+            '  if [ $ret != 0 ]; then\n' \
+            '    echo $rs build fail; _clear 8\n' \
+            '  fi\n' \
+            'done\n' \
+            '_clear 0\n' \
+            'su - build -c "df -hT"\n' \
+            % (SHARE_ROOT, BUILD_ROOT, SDK_PATH, PROFILE, package, build_mode, self.parallel_jobs_cfg)
+
+        self.run_script = buildcmd
+        print self.run_script
+
+        with open(os.path.join(self.builddir, 'run'), 'w') as fcmdl:
+            fcmdl.write('%s' % self.run_script)
+        os.chmod(os.path.join(self.builddir, 'run'), 0777)
+
+    def get_log(self):
+
+        self.buildlog = ''
+        for filename in os.listdir(self.builddir):
+            if re.match('build.*\.log', filename):
+                onefile = tail(os.path.join(self.builddir, filename), c=2304)
+                self.buildlog = self.buildlog + onefile[:onefile.rfind("Finished build-native")]
+
+    def check_vm_result(self):
+
+        # Get installed rootstrap version
+        built_version = None
+        with open(os.path.join(self.builddir, 'rsver')) as rsverfile:
+            built_version = rsverfile.read().replace(' ', ', ').replace('\n', ' ').encode('utf8')
+            built_version = built_version.split('.')[-1]
+            print 'Installed RS version... %s' % built_version
+        if built_version is None:
+            print 'Not able to detect installed Rootstrap version'
+
+        self.built_images = []
+        for filename in os.listdir(self.builddir):
+            if re.match('.*\.tpk', filename):
+                self.built_images.append(filename)
+        self.get_log()
+        if int(self.status) != 0:
+            return [built_version, 'FAIL::CLI BUILD', self.buildlog]
+        if not glob.glob(os.path.join(self.builddir, '*.tpk')):
+            for filename in os.listdir(self.builddir):
+                if re.match('pkg.*\.log', filename):
+                    self.buildlog = self.buildlog + tail(os.path.join(self.builddir, filename))
+            return [built_version, 'FAIL::CLI PKG', self.buildlog]
+        return [built_version, None, None]
+
+    def run(self):
+
+        subprocess.call(self.cmd, stdout=sys.stdout,
+                        stderr=sys.stderr, shell=True)
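+        # The guest is expected to write its exit status into 'testresult'
+        # in the shared build directory.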
+        try:
+            with open(os.path.join(self.builddir,'testresult')) as statusf:
+                self.status = statusf.read().strip()
+            print 'KVM returned [%s]' % (self.status)
+        except IOError, _err:
+            raise LocalError('KVM Failed')
+
+class AbsRepository(object):
+
+    profile = ''
+
+    def __init__(self, project, email=None, name=None, \
+                 newrev=None, refname=None, changenumber=None, \
+                 branch=None, refspec=None, patchset=None, full_build=None, \
+                 obs_project=None, obs_package=None):
+        self.gerrit_event = {'project'     : project, \
+                             'email'       : email, \
+                             'name'        : name, \
+                             'newrev'      : newrev, \
+                             'refname'     : refname, \
+                             'changenumber': changenumber, \
+                             'branch'      : branch, \
+                             'refspec'     : refspec, \
+                             'patchset'    : patchset}
+
+        print 'AbsRepository:\n%s' % self.gerrit_event
+
+        self.full_build = full_build
+        self.set_gerrit_event()
+        self.set_obs_info(obs_project, obs_package)
+
+    def set_gerrit_event(self):
+
+        def _set_basic_data(self, req_type=None, tag=None, branch=None, revision=None):
+            if req_type is not None: self.request_type = req_type
+            if tag is not None:      self.tag = tag
+            if branch is not None:   self.branch = branch
+            #if revision is not None: self.revision = revision
+            self.revision = revision
+            print 'ABS basic data: Type(%s), Branch(%s), Tag(%s), Rev(%s)' \
+                  % (self.request_type, self.branch, self.tag, self.revision)
+
+        self.project = self.gerrit_event['project']
+        self.package = os.path.basename(self.project)
+
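+        # Classify the request: FULLBUILD (rootstrap rebuild), REVIEW
+        # (gerrit patchset), SUBMIT (submit/* tag push) or COMMIT (plain
+        # branch push).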
+        if self.full_build is not None:
+            _set_basic_data(self, 'FULLBUILD', \
+                            'full-build-tag', \
+                            self.gerrit_event['branch'], \
+                            'full-build-rev')
+        elif self.gerrit_event['changenumber'] is not None:
+            _set_basic_data(self, 'REVIEW', \
+                             self.gerrit_event['refspec'], \
+                             self.gerrit_event['branch'], \
+                             self.gerrit_event['patchset'])
+        elif self.gerrit_event['refname'] is not None:
+            if self.gerrit_event['refname'].startswith('refs/tags/submit/'):
+                _set_basic_data(self, 'SUBMIT', \
+                                 self.gerrit_event['refname'].replace('refs/tags/', ''), \
+                                 self.gerrit_event['refname'].replace('refs/tags/', '').split('/')[1], \
+                                 self.gerrit_event['newrev'])
+            elif self.gerrit_event['refname'].startswith('refs/tags/'):
+                raise LocalError('ABS will not process %s' % self.gerrit_event['refname'])
+            else:
+                _set_basic_data(self, 'COMMIT', \
+                                 self.gerrit_event['newrev'], \
+                                 self.gerrit_event['refname'], \
+                                 self.gerrit_event['newrev'])
+        else:
+            raise LocalError('ABS invalid request')
+
+    def set_package(self, package=None):
+        if package is not None:
+            self.package = package
+        else:
+            self.package = os.path.basename(self.gerrit_event['project'])
+
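+    # Built .tpk artifacts live in a parallel "<branch>[_<profile>]_tpk"
+    # branch/tag namespace; the two helpers below derive those names.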
+    def set_new_tag(self):
+        if self.branch.endswith('_' + self.profile):
+            self.new_tag = self.tag.replace(self.branch, self.branch + '_tpk')
+        else:
+            self.new_tag = self.tag.replace(self.branch, self.branch + '_' + self.profile + '_tpk')
+        print 'set new tag [%s]' % self.new_tag
+
+    def set_new_branch(self):
+        if self.branch.endswith('_' + self.profile):
+            self.new_branch = self.branch + '_tpk'
+        else:
+            self.new_branch = self.branch + '_' + self.profile + '_tpk'
+        print 'set new branch [%s]' % self.new_branch
+
+    def set_obs_info(self, obs_project, obs_package):
+        if obs_project is not None: 
+            self.obs_project = obs_project
+            self.profile = self.obs_project.split(':')[-1].lower()
+            print 'Set obs project to [%s]' % self.obs_project
+            print 'Set profile to [%s]' % self.profile
+            self.set_new_tag()
+            self.set_new_branch()
+        if obs_package is not None: 
+            self.obs_package = obs_package
+
+    def set_sr_tag(self, sr_tag):
+        self.sr_tag = sr_tag
+
+class SourceWork(object):
+
+    def __init__(self, builddir, project, branch=None, tag=None):
+        self.workspace = os.path.join(builddir, os.path.basename(project))
+        self.project = project
+        self.working_git = self.clone_from_gerrit(project=self.project)
+
+    def clone_from_gerrit(self, project=None):
+
+        if project is not None:
+            self.workspace = os.path.join(os.path.dirname(self.workspace), os.path.basename(project))
+            self.project = project
+
+        gerrit_env = GerritEnv()
+        for retry in xrange(20):
+            try:
+                print "Cloning into bare repo"
+                print 'work=%s' % os.path.join(os.getenv('GIT_CACHE_DIR'), self.project)
+                clone_gitproject(self.project, '%s.git' % \
+                                 os.path.join(os.getenv('GIT_CACHE_DIR'), self.project), \
+                                 gerrit_hostname=gerrit_env.hostname, \
+                                 gerrit_username=gerrit_env.username, \
+                                 gerrit_sshport=gerrit_env.sshport, \
+                                 bare=True)
+
+                print "trying to clone at %s " % (str(datetime.now()))
+                if not clone_gitproject(self.project, self.workspace, \
+                                        gerrit_hostname=gerrit_env.hostname, \
+                                        gerrit_username=gerrit_env.username, \
+                                        gerrit_sshport=gerrit_env.sshport):
+                    raise LocalError('Error cloning project %s' % self.project)
+                print "finished clone at %s " % (str(datetime.now()))
+                break
+            except Exception, e:
+                if retry >= 19:  # xrange(20) ends at 19: give up after the last attempt
+                    print 'Error git clone!!!'
+                    return None
+                    #raise LocalError('Error git clone for %s' % self.project)
+                print 'Retry cloning in 30 sec. %s' % str(e)
+                time.sleep(30)
+
+        return Git(self.workspace)
+
+    def checkout_git_project(self, git=None, branch=None, tag=None, patchset=False, full_build=None, profile=None, obs_project=None):
+        print 'Trying to checkout project(%s), Branch(%s), Tag(%s) at %s' \
+              % (self.project, branch, tag, str(datetime.now()))
+        if git is None: 
+            git = self.working_git
+
+        if patchset:
+            if tag and tag.startswith('refs/changes/'):
+                git._git_inout('fetch', ['origin', '%s' % tag])
+                git._git_inout('reset', ['--hard', 'FETCH_HEAD'])
+                return
+            else:
+                raise LocalError('Patchset detected but no tag')
+
+        if full_build is not None:
+            tag = self.retrieve_last_accepted_tag(profile=profile, obs_project=obs_project)
+            print 'Force to checkout full build for [%s]' % tag
+
+        try:
+            if tag is None:
+                git.checkout(branch)
+            else:
+                git.checkout(tag)
+        except Exception, e:
+            raise LocalError('Checking out failed. Project(%s), Branch(%s), Tag(%s) Reason(%s)' \
+                             % (self.project, branch, tag, str(e)))
+        return tag
+
+    def zipping_workspace(self):
+        current_dir = os.getcwd()
+        os.chdir(os.path.dirname(self.workspace))
+        zipf = zipfile.ZipFile(os.path.basename(self.workspace)+'.zip', 'w', zipfile.ZIP_DEFLATED)
+        for root, dirs, files in os.walk(os.path.basename(self.workspace)):
+            for file in files:
+                zipf.write(os.path.join(root, file))
+        zipf.close()
+        os.chdir(current_dir)
+
+    def zipping_workspace_low(self):
+        current_dir = os.getcwd()
+        os.chdir(os.path.dirname(self.workspace))
+        src_d = os.path.basename(self.workspace)
+        out_f = src_d + '.zip'
+        out = runner.show('zip --symlinks -r %s %s' % (out_f, src_d))
+        os.chdir(current_dir)
+
+    def zipping_workspace_workaround(self):
+        current_dir = os.getcwd()
+        os.chdir(os.path.dirname(self.workspace))
+        src_d = os.path.basename(self.workspace)
+        out_f = src_d + '.zip'
+        self.ZipDir(src_d, out_f)
+        os.chdir(current_dir)
+
+    def ZipDir(self, inputDir, outputZip):
+        '''Zip up a directory and preserve symlinks and empty directories'''
+        zipOut = zipfile.ZipFile(outputZip, 'w', compression=zipfile.ZIP_DEFLATED)
+    
+        rootLen = len(os.path.dirname(inputDir))
+        def _ArchiveDirectory(parentDirectory):
+            contents = os.listdir(parentDirectory)
+            #store empty directories
+            if not contents:
+                archiveRoot = parentDirectory[rootLen:].replace('\\', '/').lstrip('/')
+                zipInfo = zipfile.ZipInfo(archiveRoot+'/')
+                zipOut.writestr(zipInfo, '')
+            for item in contents:
+                fullPath = os.path.join(parentDirectory, item)
+                if fullPath.endswith('/.git'): continue
+                if os.path.isdir(fullPath) and not os.path.islink(fullPath):
+                    _ArchiveDirectory(fullPath)
+                else:
+                    archiveRoot = fullPath[rootLen:].replace('\\', '/').lstrip('/')
+                    if os.path.islink(fullPath):
+                        zipInfo = zipfile.ZipInfo(archiveRoot)
+                        zipInfo.create_system = 3
+                        # 0xA1ED0000L == (S_IFLNK | 0755) << 16: mark the
+                        # entry as a symlink in the zip external attributes
+                        zipInfo.external_attr = 2716663808L
+                        zipOut.writestr(zipInfo, os.readlink(fullPath))
+                    else:
+                        zipOut.write(fullPath, archiveRoot, zipfile.ZIP_DEFLATED)
+        _ArchiveDirectory(inputDir)
+        zipOut.close()
+
+    def register_user_account(self, name, email):
+        try:
+            with open(os.path.join(self.workspace, ".git", "config"), "a") as myfile:
+                myfile.write("[user]\n\tname = %s\n\temail = %s\n" \
+                             % (name, email))
+        except:
+            raise LocalError('Setting up git user failure. Exit now')
+        print "Tagging for - user:%s, email:%s" \
+            % (self.working_git.get_config('user.name'), self.working_git.get_config('user.email'))
+
+    def checkout_tpk_branch(self, new_branch):
+        if self.working_git.has_branch('origin/' + new_branch, remote = True):
+            print 'Branch (%s) already exist' % new_branch
+            self.working_git.checkout(new_branch)
+        else:
+            self.working_git._git_inout('checkout', ['--orphan', '%s' % new_branch])
+            self.working_git._git_inout('rm', ['-rf', '.'])
+
+        print self.working_git._git_inout('fetch', ['origin'])[0]
+        print self.working_git._git_inout('reset', ['origin/' + new_branch, '--hard'])[0]
+
+    def retrieve_last_accepted_tag(self, profile=None, obs_project=None):
+        # full build will use the most recent accepted tag.
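+        # The accepted tag body carries the original SR tag either on a
+        # "Reference: <tag>" line (_tpk branch) or on a "- Tag: <tag>" line
+        # (source branch); both lookups below fall back to None when no
+        # submit/tizen* tag can be recovered.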
+        if obs_project == 'Tizen:Unified':
+            check_branch = 'tizen'
+        else:
+            check_branch = ('/'.join(obs_project.split(':')).lower())
+        t_outs, t_err, t_code = self.working_git._git_inout('for-each-ref', \
+                                     ['--sort=-taggerdate', '--format=%(refname)%0a%(*subject)%0a%(*body)', \
+                                      'refs/tags/accepted/%s/' % check_branch, '--count=1'])
+        #if len(t_outs) == 0 or '/%s/' % profile not in t_outs.split('\n')[0]:
+        if len(t_outs) == 0:
+            print 'Sorry. No accepted tag found under refs/tags/accepted/%s/.' % check_branch
+            return None
+        accepted_tag = t_outs.split('\n')[0].replace('refs/tags/','').rstrip()
+        orig_tag = t_outs[t_outs.rfind('Reference: ')+11:].split('\n')[0].split(' ')[0].strip().rstrip()
+        if orig_tag.startswith('submit/tizen'):
+            print 'FULL BUILD! Accepted tag from _tpk branch %s -> %s' % (accepted_tag, orig_tag)
+            return orig_tag
+
+        t_outs, t_err, t_code = self.working_git._git_inout('show', \
+                                     [accepted_tag])
+        if len(t_outs) == 0 or '- Git project: %s' % self.project not in t_outs:
+            print 'Sorry(1). Fail to retrieve original tag from %s' % accepted_tag
+            return None
+        orig_tag = t_outs[t_outs.rfind('- Tag:')+6:].split('\n')[0].strip().rstrip()
+        if orig_tag.startswith('submit/tizen'):
+            print 'FULL BUILD! Accepted tag from source branch %s -> %s' % (accepted_tag, orig_tag)
+            return orig_tag
+
+        print 'Sorry(2). Fail to retrieve original tag from %s' % accepted_tag
+        return None
+
+    def retrieve_license(self):
+        self.sdk_license = ''
+        license_file = os.path.join(self.workspace, 'LICENSE')
+        if os.path.isfile(license_file):
+            shutil.copy(license_file, os.path.dirname(self.workspace))
+            with open(license_file, 'r') as r_file:
+                for x in r_file.readlines():
+                    matchline = re.search(r'.*Licensed under the (.*) License, Version (.*) \(the \"License\"\).*;', x)
+                    if matchline:
+                        self.sdk_license = '%s-%s' % (matchline.groups()[0], matchline.groups()[1])
+        print 'LICENSE => [%s]' % self.sdk_license
+
+    def retrieve_properties_from_project_def(self):
+
+        # Single project
+        def_file = os.path.join(self.workspace, 'tizen-manifest.xml')
+        # Multi project
+        multiproject_list_f = os.path.join(self.workspace, 'WORKSPACE')
+        if os.path.isfile(multiproject_list_f):
+            with open(multiproject_list_f, 'r') as r_file:
+                for x in r_file.readlines():
+                    if os.path.isdir(os.path.join(self.workspace, x.rstrip())) and \
+                        os.path.isfile(os.path.join(self.workspace, x.rstrip(), 'tizen-manifest.xml')):
+                        def_file = os.path.join(self.workspace, x.rstrip(), 'tizen-manifest.xml')
+
+        print 'Property file: %s' % def_file
+        if os.path.isfile(def_file):
+            root = ElementTree.ElementTree(file=def_file).getroot()
+            self.sdk_package = root.attrib['package']
+            self.sdk_version = root.attrib['version']
+        else:
+            raise LocalError('Property file [%s] not found!' % def_file)
+        print 'TIZEN-MANIFEST.XML (%s) (%s)' % (self.sdk_package, self.sdk_version)
+
+    def retrieve_commit_id(self):
+        commit_id = self.working_git.rev_parse('HEAD')
+        print 'commit from (HEAD) = %s' % (commit_id)
+        return commit_id
+
+    def retrieve_revision_number(self, tag):
+        try:
+            revision_number = self.working_git.rev_parse(tag)
+        except:
+            revision_number = self.commit_id
+        print 'revision from (%s) = %s' % (tag, revision_number)
+        return revision_number
+
+    def retrieve_commit_message(self):
+        t_outs, t_err, t_code = self.working_git._git_inout('log', ['--format=%B', '-n 1'])
+        self.commit_message = t_outs[:t_outs.rfind("\nChange-Id: ")].rstrip() + '\n'
+        print "author comment : %s" % self.commit_message
+
+    def retrieve_sr_message(self, tag):
+        t_outs, t_err, t_code = self.working_git._git_inout('show', [tag, '--format=##chkstr:%b'])
+        sr_message = '\n'.join(t_outs[:t_outs.rfind("##chkstr:")].split('\n')[3:])
+        print "sr comment from(%s) : [%s]" % (tag, sr_message.rstrip())
+        return sr_message
+
+    def retrieve_project_property(self, tag):
+        self.retrieve_properties_from_project_def()
+        self.commit_id = self.retrieve_commit_id()
+        self.revision_number = self.retrieve_revision_number(tag)
+        self.retrieve_commit_message()
+        self.sr_message = self.retrieve_sr_message(tag)
+        self.retrieve_license()
+
+    def push_tpk_into_new_branch(self, tpk_dir, branch, obs_pkg_name, tag, new_tag, email, rs_ver):
+        # Check existence of tag first!
+        try:
+            chk_tag = self.working_git.rev_parse(new_tag)
+            print 'Checking tag [%s]' % (chk_tag)
+            if chk_tag:
+                print 'TAG(%s) already exist.' % new_tag
+                #self.working_git.push(repo = 'origin', src = ':' + new_tag)
+                #self.working_git._git_inout('tag', ['-d', new_tag])
+                #raise LocalError('Tag(%s) already exist' % new_tag)
+                return 'Tag(%s) already exist.' % new_tag
+        except:
+            pass
+        try:
+            self.working_git.remove_files('*')
+        except:
+            pass
+        try:
+            self.working_git.remove_files('packaging/*')
+        except:
+            pass
+        if not os.path.exists(os.path.join(self.workspace, 'packaging')):
+            os.makedirs(os.path.join(self.workspace, 'packaging'))
+        for filename in os.listdir(tpk_dir):
+            if re.match('.*\.tpk', filename):
+                print "Found [%s]." % (filename)
+                shutil.copy(os.path.join(tpk_dir, filename), os.path.join(self.workspace, filename))
+        self.working_git.add_files('*.tpk', True)
+
+        with open(os.path.join(self.workspace, "packaging", obs_pkg_name + ".spec"), "w") as text_file:
+            text_file.write("%s" % self.spec)
+        self.working_git.add_files('packaging/*.spec', True)
+
+        #TODO:
+        if False: #self.sdk_license:
+            shutil.copy(os.path.join(os.path.dirname(self.workspace), 'LICENSE'), \
+                        os.path.join(self.workspace, 'LICENSE'))
+            self.working_git.add_files('LICENSE', True)
+
+        commit_message = "[ABS] %s\nReference: %s\nCommit id: %s\nRequested by: %s\nSDK rootstrap version: %s" \
+                        % (self.commit_message, tag, self.commit_id, email, rs_ver)
+        self.working_git.commit_staged(commit_message)
+        self.commit_id_new = self.retrieve_commit_id()
+
+        push_err_msg = ''
+        push_retry = 20
+        while push_retry > 0:
+            try:
+                self.working_git.push(repo = 'origin', src = '%s' % (branch), force=True)
+                return None
+            except GitRepositoryError, gre:
+                print 'git push exception: %s' % str(gre)
+                push_err_msg = push_err_msg + '\n' + str(gre)
+                push_retry -= 1
+                time.sleep(5)
+        if not push_retry:
+            print 'Push failed 20 times'
+            return push_err_msg
+
+    def push_tag_new_branch(self, tag, new_tag, mail):
+
+        gerrit_env = GerritEnv()
+        remote = 'ssh://%s@%s:%d/%s' % (gerrit_env.username, \
+                                        gerrit_env.hostname, \
+                                        int(gerrit_env.sshport), \
+                                        self.project)
+        message = '%s\n' \
+            '[ABS] Ready.\n' \
+            '- Original Tag: %s (%s)\n' \
+            '- Original Commit: %s\n' \
+            '- Requested By: %s\n' \
+            % (self.sr_message, tag, self.revision_number, self.commit_id, mail)
+
+        try:
+            self.working_git.create_tag(name=new_tag, msg=message, commit=self.commit_id_new) 
+        except GitRepositoryError, e:
+            #raise LocalError('Cannot create tag %s (%s)' % (new_tag, str(e)))
+            print 'Cannot create tag %s (%s)' % (new_tag, str(e))
+            return str(e)
+
+        self.revision_number_new = self.retrieve_revision_number(new_tag)
+
+        push_err_msg = ''
+        push_retry = 20
+        while push_retry > 0:
+            try:
+                self.working_git.push_tag(remote, new_tag)
+                return None
+            except GitRepositoryError, e:
+                #raise LocalError('Cannot push tag %s (%s)' % (new_tag, str(e)))
+                print 'Cannot push tag %s (%s)' % (new_tag, str(e))
+                push_err_msg = push_err_msg + '\n' + str(e)
+                push_retry -= 1
+                time.sleep(5)
+        if not push_retry:
+            print 'Push tag failed 20 times'
+            return push_err_msg
+
+    def generate_spec_file(self, obs_pkg_name, rsver, no_tpk_branch='false', include_archs=None):
+
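+        # Compose a container .spec that ships the built .tpk files under
+        # %{TZ_SYS_RO_APP}/.preload-tpk; Group/License/Summary are inherited
+        # from an existing packaging/*.spec when one is present.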
+        group = ''
+        license = self.sdk_license
+        summary = ''
+        if os.path.isdir(os.path.join(self.workspace, 'packaging')):
+            for filename in os.listdir(os.path.join(self.workspace, 'packaging')):
+                if re.match('.*\.spec', filename):
+                    with open(os.path.join(self.workspace, 'packaging', filename)) as f_spec:
+                        for x in f_spec.readlines():
+                            if group == '' and re.match('Group:.*', x):
+                                group = x.split(':')[1].strip().rstrip()
+                            if license == '' and re.match('License:.*', x):
+                                license = x.split(':')[1].strip().rstrip()
+                            if summary == '' and re.match('Summary:.*', x):
+                                summary = x.split(':')[1].strip().rstrip()
+        if group == '': group = 'N/A'
+        if license == '': license = 'N/A'
+        if summary == '': summary = obs_pkg_name.replace('org.tizen.', '')
+
+        vcs_desc = '#'
+        if no_tpk_branch != 'true':
+            vcs = '#VCS_FROM:   %s' % (self.project + "#" + self.commit_id)
+            vcs_desc = vcs.split(' ')[-1]
+        else:
+            vcs = 'VCS:         %s' % (self.project + "#" + self.commit_id)
+
+        if os.getenv('EXCLUDE_ARCH'):
+            exclude_arch = 'ExcludeArch: '
+            for exa in os.getenv('EXCLUDE_ARCH').split(','):
+                exclude_arch = exclude_arch + ' ' + exa
+        else:
+            exclude_arch = ''
+
+        if include_archs is not None:
+            exclusive_arch = 'ExclusiveArch: '
+            for exa in include_archs:
+                exclusive_arch = exclusive_arch + ' ' + exa
+        else:
+            exclusive_arch = ''
+
+        if False:  # if os.getenv('BUILD_MODE') == 'Debug':
+            build_mode = '-debug'
+        else:
+            build_mode = '%{nil}'
+
+        header  = "Name:       %s\n%s\n#RS_Ver:    %s\n" \
+                  "Summary:    %s\nVersion:    %s\nRelease:    1\n" \
+                  "Group:      %s\nLicense:    %s\n" \
+                  "Source0:    %%{name}-%%{version}.tar.gz\n" \
+                  "\n" \
+                  "%s\n" \
+                  "%s\n" \
+                  "BuildRequires:  pkgconfig(libtzplatform-config)\n" \
+                  "Requires(post):  /usr/bin/tpk-backend\n\n" \
+                  "%%define internal_name %s\n" \
+                  "%%define preload_tpk_path %%{TZ_SYS_RO_APP}/.preload-tpk \n" \
+                  "\n" \
+                  "%%define build_mode %s\n\n" \
+                  "%%ifarch arm armv7l\n%%define target arm\n%%endif\n" \
+                  "%%ifarch aarch64\n%%define target aarch64\n%%endif\n" \
+                  "%%ifarch x86_64\n%%define target x86_64\n%%endif\n" \
+                  "%%ifarch i386 i486 i586 i686\n%%define target x86\n%%endif\n" \
+                  "%%description\n" \
+                  "%s\nThis is a container package which have preload TPK files\n\n" \
+                  % (obs_pkg_name, vcs, rsver, summary, self.sdk_version, \
+                     group, license, exclusive_arch, exclude_arch, self.sdk_package, build_mode, vcs_desc)
+
+        content = "%prep\n%setup -q\n\n%build\n\n" \
+                  "%install\n" \
+                  "rm -rf %{buildroot}\n" \
+                  "mkdir -p %{buildroot}/%{preload_tpk_path}\n" \
+                  "install %{internal_name}-%{version}-%{target}%{build_mode}.tpk %{buildroot}/%{preload_tpk_path}/\n" \
+                  "\n" \
+                  "%post\n" \
+                  "\n" \
+                  "%files\n%defattr(-,root,root,-)\n" \
+                  "%{preload_tpk_path}/*\n"
+        #TODO: App sources are open to the public. Installing LICENSE file is not required.
+        #if self.sdk_license:
+        #    content = content + '%license LICENSE\n'
+
+        self.spec = header + content
+        #print '\n\n==========\n%s\n===========\n\n' % self.spec
+
+    def get_git_repo_size(self, quiet=True):
+        if not quiet:
+            self.working_git._git_inout('gc', ['--quiet'])
+        pack_size, t_err, t_code = self.working_git._git_inout('count-objects', ['-v'])
+        pack_size = pack_size[pack_size.find('size-pack:')+11:].split('\n')[0]
+        if not quiet:
+            print '\"UNPACK-SIZE\": \"%s/%s\"' % (self.pack_size, pack_size)
+        else:
+            self.pack_size = pack_size
+
+class Builder(object):
+
+    full_build = None
+    create_obs_job = False
+
+    def __init__(self):
+        fields = trigger_info(os.getenv('TRIGGER_INFO'))
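+        # TRIGGER_INFO without an 'event' field means this run was chained
+        # from a builder and only needs to create the OBS prerelease project.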
+        if 'event' not in fields:
+            self.create_obs_job = True
+            return
+
+        event = fields['event']
+
+        if 'full_build' in fields:
+            self.full_build = fields['full_build']
+            print 'Full build requested for %s' % self.full_build
+            self._abs = AbsRepository(project= event['GERRIT_PROJECT'], \
+                                  email=       event['GERRIT_EVENT_ACCOUNT_EMAIL'], \
+                                  name=        event['GERRIT_EVENT_ACCOUNT_NAME'], \
+                                  branch=      fields['source']['branch'], \
+                                  full_build=  self.full_build, \
+                                  obs_project= fields['obs_project'], \
+                                  obs_package= fields['obs_package'])
+        else:
+            self._abs = AbsRepository(project=     event['GERRIT_PROJECT'], \
+                                      email=       event['GERRIT_EVENT_ACCOUNT_EMAIL'], \
+                                      name=        event['GERRIT_EVENT_ACCOUNT_NAME'], \
+                                      newrev=      event['GERRIT_NEWREV'], \
+                                      refname=     event['GERRIT_REFNAME'], \
+                                      changenumber=event['GERRIT_CHANGE_NUMBER'], \
+                                      branch=      event['GERRIT_BRANCH'], \
+                                      refspec=     event['GERRIT_REFSPEC'], \
+                                      patchset=    event['GERRIT_PATCHSET_REVISION'], \
+                                      obs_project= fields['obs_project'], \
+                                      obs_package= fields['obs_package'])
+
+        #self._abs.set_obs_info(fields['obs_project'], fields['obs_package'])
+
+    def prepare_working_directory(self):
+
+        self.basedir = os.path.join(os.getenv('WORKSPACE'), os.getenv('JOB_NAME'))
+        if os.path.exists(self.basedir):
+            shutil.rmtree(self.basedir)
+        self.builddir = os.path.join(self.basedir, 'build')
+        print 'basedir=%s, builddir=%s' % (self.basedir, self.builddir)
+        os.makedirs(self.builddir)
+
+    def create_obs_project(self):
+
+        fields = trigger_info(os.getenv('TRIGGER_INFO'))
+
+        my_build = ObsBuildService()
+        my_mail = MailSender(receiver = fields['mail_contents']['receiver'], \
+                             title = fields['mail_contents']['title'], \
+                             body = fields['mail_contents']['body'])
+
+        retry_count = 3
+        while retry_count > 0:
+            try:
+                create_project(fields['url'], \
+                               fields['project'], \
+                               fields['_obs_tag'], \
+                               fields['_obs_revision'], \
+                               my_build.build, \
+                               fields['obs_project'], \
+                               fields['new_obs_project'], \
+                               fields['submitter'], \
+                               fields['obs_package'], \
+                               fields['_obs_files'])
+
+                my_mail.add_title('SUCCESS')
+                my_mail.add_message('To view, visit %s/package/show?package=%s&project=%s' \
+                            % (my_build.obs_url, fields['obs_package'], fields['new_obs_project']))
+                break
+            except Exception, err:
+                print err
+                time.sleep(15)
+                retry_count -= 1
+        if not retry_count:
+            print 'retrying failed'
+            fail_title = 'OBS creation fail'
+            my_mail.add_title(fail_title)
+            my_mail.add_message('Result: FAIL')
+            my_mail.add_message('\n\nFail to create OBS prerelease project')
+
+        return my_mail.send_mail()
+
+    def main(self):
+
+        if self.create_obs_job:
+            return self.create_obs_project()
+
+        my_build = ObsBuildService()
+        my_mail = MailSender()
+
+        self.prepare_working_directory()
+
+        my_mail.add_receiver(self._abs.gerrit_event['email'])
+        my_mail.add_message('Package: %s' % self._abs.project)
+        if self._abs.request_type == 'SUBMIT':
+            my_mail.add_receiver(os.getenv('ABS_MAILINGLIST').split(','))
+            #my_mail.add_maintainers(self._abs.project)
+        else:
+            my_mail.add_title('(preview)')
+        my_mail.add_title(self._abs.package)
+        my_mail.add_title('(' + self._abs.branch + ')')
+        if self.full_build is not None:
+            my_mail.add_message('Build cause: New Rootstrap %s' % self.full_build)
+
+        #### Fetch Source Code
+        my_source = SourceWork(self.builddir, \
+                               self._abs.project)
+        if my_source.working_git is None:
+            print '\"Title\": \"%s/%s\"' \
+                   % (self._abs.package, self._abs.tag.replace('refs/',''))
+            raise LocalError('Fail to clone %s' % self._abs.project)
+
+        int_tag = my_source.checkout_git_project(branch = self._abs.branch, \
+                                                 tag = self._abs.tag, \
+                                                 patchset=self._abs.request_type=='REVIEW', \
+                                                 full_build=self.full_build, \
+                                                 profile=self._abs.profile, \
+                                                 obs_project=self._abs.obs_project)
+
+        my_source.get_git_repo_size()
+
+        if self.full_build is None:
+            print '\"Title\": \"%s/%s\"' \
+                   % (self._abs.package, self._abs.tag.replace('refs/',''))
+        else:
+            #TODO:
+            self._abs.tag = int_tag
+            print '\"Title\": \"%s/%s/%s\"' \
+                  % (self._abs.package, self.full_build, self._abs.tag)
+            if int_tag is None or not int_tag.startswith('submit/tizen'):
+                raise LocalError('Full build cannot proceed. No matching SR tag')
+
+        my_mail.add_message('Tag/branch: %s' % self._abs.tag)
+        my_mail.add_message('Target OBS project: %s' % self._abs.obs_project)
+
+        # Retrieve project settings
+        my_source.retrieve_project_property(self._abs.tag)
+        my_mail.add_message('SR subject: %s' % my_source.sr_message.rstrip(), top=True)
+
+        my_source.zipping_workspace_workaround()
+
+        my_vm = VirtualMachine(self.basedir, version_with=self.full_build, identifier=self._abs.obs_project)
+        my_vm.generate_build_cmd(self._abs.package, self._abs.profile, gitpath=self._abs.project)
+
+        #### Running QEMU to launch Tizen SDK CLI build
+        print "[ s d k - k v m    s t a r t ] %s " % (str(datetime.now()))
+        sys.stdout.flush()
+        ret = my_vm.run()
+        print "[ s d k - k v m    f i n i s h ] %s " % (str(datetime.now()))
+        set_permissions(self.builddir, (0644, 0755))
+        sys.stdout.flush()
+
+        rs_ver, fail_title, fail_log = my_vm.check_vm_result()
+        my_mail.add_message('Sdk Rootstrap Version: %s' % rs_ver)
+        print 'VM returned [%s] [%s] [%s]\n' % (rs_ver, fail_title, fail_log)
+        if fail_title is not None:
+            my_mail.add_title(fail_title)
+            my_mail.add_message('Result: FAIL')
+            my_mail.add_message('\n\n%s' % fail_log)
+
+            if self.full_build is None:
+                return my_mail.send_mail()
+
+        found_archs = []
+        archs_lookup = [['i386', 'i486', 'i586', 'i686', 'x86'], \
+                        ['arm', 'armv7l'], \
+                        ['x86_64'], \
+                        ['aarch64']]
+
+        for fn in os.listdir(my_vm.builddir):
+            mtch = re.search(r'(.*)-([0-9.]+)-(.*)(?<!-debug)\.tpk', fn)
+            if mtch and mtch.groups() and len(mtch.groups()) == 3:
+                for l in archs_lookup:
+                    if mtch.groups()[2] in l:
+                        found_archs.extend(l)
+                        break
+        found_archs = list(set(found_archs))
+        print 'found_archs=%s' % found_archs
+
+        # Generate .spec file
+        my_source.generate_spec_file(self._abs.obs_package, rs_ver, \
+                                     no_tpk_branch=os.getenv('NO_TPK_BRANCH'), include_archs=found_archs)
+
+        if self.full_build is not None:
+            if fail_title is not None:
+                return 4
+            return
+
+        push_err_msg = None
+        if fail_title is None and self._abs.request_type != 'SUBMIT':
+            my_mail.add_title('SUCCESS')
+        elif fail_title is None and self._abs.request_type == 'SUBMIT' and os.getenv('NO_TPK_BRANCH') != 'true':
+            # Register User Account
+            my_source.register_user_account(self._abs.gerrit_event['name'], \
+                                            self._abs.gerrit_event['email'])
+            # Checkout _tpk branch and push the tpks and tag it
+            my_source.checkout_tpk_branch(self._abs.new_branch)
+            push_err_msg = my_source.push_tpk_into_new_branch(my_vm.builddir, \
+                                               self._abs.new_branch, \
+                                               self._abs.obs_package, \
+                                               self._abs.tag, \
+                                               self._abs.new_tag, \
+                                               self._abs.gerrit_event['email'], rs_ver)
+
+            my_source.get_git_repo_size(quiet=False)
+
+            if push_err_msg is None:
+                push_err_msg = my_source.push_tag_new_branch(self._abs.tag, \
+                                          self._abs.new_tag, \
+                                          self._abs.gerrit_event['email'])
+
+        if push_err_msg is not None:
+            fail_title = 'Git push failed'
+            my_mail.add_title(fail_title)
+            my_mail.add_message('Result: FAIL')
+            my_mail.add_message('\n\nGit push failed %s' % (push_err_msg))
+            return my_mail.send_mail()
+
+        elif fail_title is None and self._abs.request_type == 'SUBMIT' and push_err_msg is None:
+            print "----> old_tag=%s\n      revision=%s\n      commit=%s" \
+                % (self._abs.tag, my_source.revision_number, my_source.commit_id)
+
+            # TODO: Pre-release Job
+            submitter = '%s <%s>' % (self._abs.gerrit_event['name'], self._abs.gerrit_event['email'])
+            url = 'ssh://%s:%d' % (GerritEnv().hostname, int(GerritEnv().sshport))
+
+            if os.getenv('NO_TPK_BRANCH') == 'true':
+                new_obs_project = my_build.staging_project_name(self._abs.obs_project, self._abs.tag)
+                print '\n  Trying to create OBS prerelease project(%s)\n' % (new_obs_project)
+                spec_file = os.path.join(self.builddir, self._abs.obs_package + ".spec")
+                with open(spec_file, "w") as text_file:
+                    text_file.write("%s" % my_source.spec)
+                tarball_file = create_tarball(my_vm.builddir, \
+                                              os.path.join(self.builddir, self._abs.obs_package + \
+                                                           '-' + my_source.sdk_version))
+                _obs_files = [spec_file, tarball_file]
+                _obs_tag = self._abs.tag
+                _obs_revision = my_source.revision_number
+                print 'Directly uploading files %s for (%s,%s)' % (_obs_files, _obs_tag, _obs_revision)
+                #TODO: How to maintain tpk history???
+            else:
+                new_obs_project = my_build.staging_project_name(self._abs.obs_project, self._abs.new_tag)
+                print '\n  Trying to create OBS prerelease project(%s)\n' % (new_obs_project)
+                _obs_files = None
+                _obs_tag = self._abs.new_tag
+                _obs_revision = my_source.revision_number_new
+                print "----> new_tag=%s\n      revision=%s\n      commit=%s" \
+                    % (_obs_tag, _obs_revision, my_source.commit_id_new)
+
+            #TODO: SEND SUCCESS MAIL
+            my_mail.add_title('SUCCESS')
+            my_mail.add_message('To view, visit %s/package/show?package=%s&project=%s' \
+                                 % (my_build.obs_url, self._abs.obs_package, new_obs_project))
+            my_mail.send_mail()
+
+            ######## Create obs project job in master node ########
+            trigger_data = {'url': url, \
+                            'project': self._abs.project, \
+                            '_obs_tag': _obs_tag, \
+                            '_obs_revision': _obs_revision, \
+                            'obs_project': self._abs.obs_project, \
+                            'new_obs_project': new_obs_project, \
+                            'submitter': submitter, \
+                            'obs_package': self._abs.obs_package, \
+                            '_obs_files': _obs_files, \
+                            'mail_contents': {'receiver': my_mail.email_to, \
+                                             'title': my_mail.email_title, \
+                                             'body': my_mail.email_body}}
+            trigger_next("CREATE_OBS", trigger_data)
+
+        print "[ JOB FINISHED AT %s ]" % (str(datetime.now()))
+
+class Dispatcher(object):
+
+    def __init__(self):
+        self._abs = AbsRepository(project=     os.getenv('GERRIT_PROJECT'), \
+                                  email=       os.getenv('GERRIT_EVENT_ACCOUNT_EMAIL'), \
+                                  name=        os.getenv('GERRIT_EVENT_ACCOUNT_NAME'), \
+                                  newrev=      os.getenv('GERRIT_NEWREV'), \
+                                  refname=     os.getenv('GERRIT_REFNAME'), \
+                                  changenumber=os.getenv('GERRIT_CHANGE_NUMBER'), \
+                                  branch=      os.getenv('GERRIT_BRANCH'), \
+                                  refspec=     os.getenv('GERRIT_REFSPEC'), \
+                                  patchset=    os.getenv('GERRIT_PATCHSET_REVISION'))
+
+        print 'Dispatcher project=[%s], package=[%s]' % (self._abs.project, self._abs.package)
+
+    def query_git_obs_map(self):
+
+        # check whether git-obs-mapping.xml exist in local
+        gerrit_env = GerritEnv()
+        obs_target_prjs = git_obs_map(self._abs.project, self._abs.branch, \
+                                      gitcache=gerrit_env.gitcache, \
+                                      gerrit_hostname=gerrit_env.hostname, \
+                                      gerrit_username=gerrit_env.username, \
+                                      gerrit_sshport=gerrit_env.sshport)
+
+        if not obs_target_prjs:
+            print "PRJ %s(%s) does not map any OBS. Exit now." % (self._abs.project, self._abs.branch)
+            return
+
+        obs_target_prjs = [ x for x in obs_target_prjs \
+                            if x['OBS_project'] in os.getenv('SUPPORTED_PROFILES').split(',') ]
+        print 'After filter out redundant mappings: %s\n' % obs_target_prjs
+
+        return obs_target_prjs
+
+    def main(self):
+
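+        # Fan out one abs_class_builder trigger per OBS target that maps
+        # this git project with staging project 'abs'.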
+        my_build = ObsBuildService()
+
+        obs_map_list = self.query_git_obs_map()
+        if obs_map_list is None:
+            return
+
+        index = 1
+        for _target in obs_map_list:
+            _obs_prj = _target['OBS_project']
+            _stg = _target['OBS_staging_project']
+            _pkg = _target['OBS_package']
+            if _stg != 'abs':
+                continue
+            print "Trying to call builder for: Branch(%s), Package(%s), OBS(%s), Staging(%s)" \
+                   % (self._abs.branch, _pkg, _obs_prj, _stg)
+            try:
+                # Check existence of target OBS project
+                non_exist_prjs = [prj for prj in set([_obs_prj]) if not my_build.build.exists(prj)]
+                if non_exist_prjs:
+                    print "Target OBS projects %s do not exist" % (non_exist_prjs)
+                    continue
+
+                # ref deleted check 
+                if is_ref_deleted(os.getenv("GERRIT_OLDREV"), os.getenv("GERRIT_NEWREV")):
+                    print "REF DELETED(%s)!" % (os.getenv("GERRIT_OLDREV"))
+                    #TODO: Delete prerelease obs project if it still exist
+                    continue
+
+                if _pkg is not None:
+                    obs_package = _pkg
+                else:
+                    obs_package = self._abs.package
+
+                trigger_data = {"obs_project": _obs_prj,
+                                "obs_package": obs_package,
+                                "source": 
+                                    {"package": self._abs.package, 
+                                     "branch": self._abs.branch, 
+                                     "tag": self._abs.tag},
+                                "event":
+                                    {"GERRIT_PROJECT":             self._abs.gerrit_event['project'],
+                                     "GERRIT_EVENT_ACCOUNT_EMAIL": self._abs.gerrit_event['email'],
+                                     "GERRIT_EVENT_ACCOUNT_NAME":  self._abs.gerrit_event['name'],
+                                     "GERRIT_NEWREV":              self._abs.gerrit_event['newrev'],
+                                     "GERRIT_REFNAME":             self._abs.gerrit_event['refname'],
+                                     "GERRIT_CHANGE_NUMBER":       self._abs.gerrit_event['changenumber'],
+                                     "GERRIT_BRANCH":              self._abs.gerrit_event['branch'],
+                                     "GERRIT_REFSPEC":             self._abs.gerrit_event['refspec'],
+                                     "GERRIT_PATCHSET_REVISION":   self._abs.gerrit_event['patchset']},
+                                "upstream": os.getenv('JOB_NAME'),
+                                "index": index}
+
+                trigger_next("abs_class_builder_%d_%d_%s" \
+                     % (int(os.getenv("BUILD_NUMBER")), index, self._abs.tag.split("/")[-1]), trigger_data)
+                index += 1
+            except Exception, ex:
+                print str(ex)
+                pass
+
+        if index > 1:
+            print "\"Title\": \"%s/%s\"" % (self._abs.package, self._abs.tag.replace('refs/',''))
+        print '---[ a b s    d i s p a t c h e r ] %s---' % str(datetime.now())
+
+def main(argv):
+    """
+    Script entry point.
+    """
+
+    print '---[SCRIPT START at %s]---' % str(datetime.now())
+
+    if os.getenv("TRIGGER_INFO"):
+        builder = Builder()
+        return builder.main()
+    else:
+        dispatcher = Dispatcher()
+        return dispatcher.main()
+
+if __name__ == '__main__':
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except LocalError, err:
+        print err
+        sys.exit(1)
+
+
diff --git a/abs/job_abs_update_vm.py b/abs/job_abs_update_vm.py
new file mode 100755 (executable)
index 0000000..e4d15f1
--- /dev/null
@@ -0,0 +1,435 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+"""This job is triggered by job_create_sdkrootstrap.py 
+   to update VM images for ABS and RBS.
+"""
+
+import os
+import sys
+import shutil
+import re
+import stat
+import ast
+import subprocess
+from datetime import datetime
+from random import randint
+
+sys.path.insert(1, os.path.join(sys.path[0], '..'))
+
+from common.utils import wget_noproxy, list_files_in_url
+from common.buildtrigger import trigger_info, trigger_next
+from common.git import Git, clone_gitproject
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+def run_inside_vm(vm_image, virt_root, snapshot_on):
+    """
+    Run build/run inside VM
+    """
+
+    ssh_connection = os.getenv('SSH_CONNECTION')
+    try:
+        lastip = int(ssh_connection.split(' ')[2].split('.')[3])
+    except (IndexError, ValueError, AttributeError):
+        print 'ssh_connection is %s, which is an incorrect format; ' \
+            'using a random value instead' % ssh_connection
+        lastip = randint(0x00, 0xff)
+    exeid = int(os.getenv('EXECUTOR_NUMBER', '1')) % 256
+    processid = os.getpid() % 256
+    buildid = int(os.getenv('BUILD_NUMBER', randint(0x00, 0xff))) % 256
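+    # Derive a mostly-unique MAC from caller IP, executor, PID and build
+    # number so concurrently running VMs do not collide on the network.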
+    mac_address = '52:%02x:%02x:%02x:%02x:%02x' % \
+        (lastip, exeid, processid, buildid, randint(0x00, 0xff))
+    if snapshot_on:
+        opt_snapshot = ",snapshot=on "
+    else:
+        opt_snapshot = ""
+    cmd = 'qemu-system-x86_64 -machine accel=kvm:xen:tcg ' \
+          '-name ubuntu -M pc -m %d -smp %d ' \
+          '-drive file=%s%s '\
+          '-virtfs local,id=test_dev,path=%s,security_model=mapped,mount_tag=share ' \
+          '-net nic,macaddr=%s -net user -nographic' \
+          % (int(os.getenv("ABS_VM_MEMORY")), int(os.getenv("ABS_VM_CPUS")), \
+            vm_image, opt_snapshot, virt_root, mac_address)
+    print "Running kvm machine...\n %s" % (cmd)
+
+    subprocess.call(cmd, stdout=sys.stdout,
+                    stderr=sys.stderr, shell=True)
+
+    #read testresult from file
+    try:
+        with open(os.path.join(virt_root,'build','testresult')) as statusf:
+            status = statusf.read().strip()
+        print 'KVM returned [%s]' % (status)
+        return int(status)
+    except IOError, _err:
+        raise LocalError('KVM Failed')
+
+def _checkout_gitproject(prjdir, gerrit_project, git_branch, git_tag):
+    if not clone_gitproject(gerrit_project, prjdir, \
+                            gerrit_hostname=os.getenv("ABS_GERRIT_HOSTNAME"), \
+                            gerrit_username=os.getenv("ABS_GERRIT_USERNAME"), \
+                            gerrit_sshport=os.getenv("ABS_GERRIT_SSHPORT")):
+        print 'Error cloning project %s' % (gerrit_project)
+        return None
+    mygit = Git(prjdir)
+    print "Trying to checkout... %s %s %s" % (gerrit_project, git_branch, git_tag)
+    if not git_tag:
+        mygit.checkout(git_branch)
+    else:
+        mygit.checkout(git_tag)
+    return mygit
+
+def prepare_sdk_tool(builddir, sdk_tool_gerrit):
+    _path = sdk_tool_gerrit.split(',')[0]
+    _branch = sdk_tool_gerrit.split(',')[1]
+    prjdir = os.path.join(builddir, os.path.basename(_path))
+    _git = _checkout_gitproject(prjdir, _path, _branch, None)
+    if _git is not None:
+        return _git.path
+    return None
+
+def prepare_working_directory(work_dir):
+    basedir = os.path.join(os.getenv("WORKSPACE"), work_dir)
+    if os.path.exists(basedir):
+        shutil.rmtree(basedir)
+    builddir = os.path.join(basedir, 'build')
+    print 'basedir=%s, builddir=%s ' % (basedir, builddir)
+    os.makedirs(builddir)
+    return basedir, builddir
+
+def wget_sdk_rootstrap(rs_url, builddir):
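+    # Download the device/emulator rootstrap zips listed at rs_url and
+    # return their basenames with the trailing ".zip" stripped.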
+    rs_device = ""
+    rs_emulator = ""
+    file_list = list_files_in_url(rs_url)
+    print file_list
+
+    for r_file in file_list:
+        print "Check %s" % r_file
+        # LIST_PROFILES is comma-separated (cf. the {${LIST_PROFILES}} brace
+        # expansion below), so turn it into a regex alternation
+        if re.compile(r'(?:%s)-3\.0-rs-device\.core.*\.zip' % os.getenv('LIST_PROFILES').replace(',', '|')).search(r_file) is not None:
+            rs_device = os.path.basename(r_file)
+            print "Downloading file %s" % (rs_device)
+        elif re.compile(r'(?:%s)-3\.0-rs-emulator\.core.*\.zip' % os.getenv('LIST_PROFILES').replace(',', '|')).search(r_file) is not None:
+            rs_emulator = os.path.basename(r_file)
+            print "Downloading file %s" % (rs_emulator)
+        else:
+            continue
+
+        wget_noproxy(r_file, os.path.join(builddir, os.path.basename(r_file)))
+
+    return rs_device[:-4], rs_emulator[:-4]
+
+def generate_update_abs_cmd(rs_version, rs_url, package_server, tool_path, wrapper_path):
+    # build command line
+    SDK_PATH = "/home/build/tizen-sdk-cli"
+    SHARE_ROOT = "/share/build" 
+    TOOL_PATH = os.path.join(SHARE_ROOT, os.path.basename(tool_path)) if tool_path else ''
+    WRAPPER_PATH = os.path.join(SHARE_ROOT, os.path.basename(wrapper_path)) if wrapper_path else ''
+    PROFILE = rs_version.split('_')[0].split('-')[-1]
+
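+    # The generated script runs as root inside the VM: it reinstalls the
+    # CLI SDK from PACKAGE_SERVER when an update-manager is available,
+    # removes the old rootstraps and installs every *-rs-*.zip from rs_url.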
+    buildcmd = '#!/bin/bash\nset -x\n' \
+        'SDK_PATH=%s; SDK_CMD=$SDK_PATH/tools/ide/bin/tizen; SHARE_ROOT=%s; ' \
+        'TOOL_PATH=%s; WRAPPER_PATH=%s; PROFILE=%s; PACKAGE_SERVER=%s\n' \
+        'ABS_CMD=/home/build/abs; TOOL_CMD=/home/build/sdk-build/pkg-cli\n' \
+        'if [ ! -z $TOOL_PATH ]; then su - build -c "cp -rf $TOOL_PATH /home/build/"; fi\n' \
+        'if [ ! -z $WRAPPER_PATH ]; then su - build -c "cp -pf $WRAPPER_PATH/abs $ABS_CMD; chmod +x $ABS_CMD"; fi\n' \
+        'LIST="$SDK_CMD list rootstrap "\n' \
+        'UPDATER="$SDK_PATH/update-manager/update-manager-cli.bin"\n' \
+        'export DISPLAY=:0\n' \
+        'su - build -c "df -hT"\n' \
+        'if [ -f $UPDATER ]; then\n' \
+        '  mylist=`su - build -c "$UPDATER show-repo-info -r $PACKAGE_SERVER -d tizen_3.0 ' \
+        '  | grep TizenSDK | cut -f 2 -d \' \'"`\n' \
+        '  IFS=\' \' read -r -a snapshot <<< ${mylist}\n' \
+        '  if [ ! -z ${snapshot[0]} ]; then\n' \
+        '    su - build -c "$UPDATER install WebCLI -r $PACKAGE_SERVER -d tizen_3.0 ' \
+        '    -s ${snapshot[0]} --remove-installed-sdk"\n' \
+        '    su - build -c "$UPDATER install `echo \"$PROFILE\" | awk \'{print toupper($0)}\'`-3.0-' \
+        '    NativeAppDevelopment-CLI -s ${snapshot[0]} --accept-license --remove-installed-sdk"\n' \
+        '  fi\n' \
+        'else\n ' \
+        '  UPDATER="$SDK_PATH/package-manager/package-manager-cli.bin"\n' \
+        '  #su - build -c "$UPDATER update"\n' \
+        'fi\n' \
+        'su - build -c "tizen list rootstrap"\n' \
+        'su - build -c "tizen version"\n' \
+        'if [ $PROFILE == "unified" ]; then\n' \
+        '  rm -rf $SDK_PATH/tools/smart-build-interface/plugins/mobile-*.xml\n' \
+        '  rm -rf $SDK_PATH/platforms/tizen-3.0/mobile/rootstraps/\n' \
+        '  rm -rf $SDK_PATH/platforms/tizen-4.0/mobile/rootstraps/\n' \
+        '  rm -rf $SDK_PATH/tools/smart-build-interface/plugins/wearable-*.xml\n' \
+        '  rm -rf $SDK_PATH/platforms/tizen-3.0/wearable/rootstraps/\n' \
+        '  rm -rf $SDK_PATH/platforms/tizen-4.0/wearable/rootstraps/\n' \
+        'else\n' \
+        '  rm -rf $SDK_PATH/tools/smart-build-interface/plugins/${PROFILE}-*.xml\n' \
+        '  rm -rf $SDK_PATH/platforms/tizen-3.0/$PROFILE/rootstraps/\n' \
+        '  rm -rf $SDK_PATH/platforms/tizen-4.0/$PROFILE/rootstraps/\n' \
+        'fi\n' \
+        'wget --no-proxy -P $SHARE_ROOT/ -r -nd -np -R index.html* %s/ &> /dev/null\n' \
+        'rs_list=`ls $SHARE_ROOT | grep .*-rs.*.zip`; if [ $? != 0 ]; then exit 6; fi\n' \
+        'for tgt in $rs_list; do \n' \
+        '  disp_v=`echo ${tgt/-rs/} | sed -rn \'s/(.*.private).*/\\1/p\'`;\n' \
+        '  su - build -c "$TOOL_CMD install-file -P $SHARE_ROOT/${tgt} -l $SDK_PATH --force" \n' \
+        '  echo ret_val = \"$?\" \n' \
+        '  if [ "$disp_v"  == `$LIST | grep ${disp_v} | sed -rn \'s/(.*.private).*/\\1/p\'` ];\n' \
+        '    then echo "OK"\n' \
+        '  else \n' \
+        '    echo "Rootstrap update failure"; exit 9\n' \
+        '  fi\n' \
+        'done \n' \
+        'su - build -c "$LIST"\n' \
+        'su - build -c "$UPDATER show-info"\n' \
+        'su - build -c "rm -rf /home/build/.update-manager/run/*"\n' \
+        'su - build -c "rm -rf /home/build/.package-manager/run/*"\n' \
+        'su - build -c "df -hT"\n' \
+        % (SDK_PATH, SHARE_ROOT, TOOL_PATH, WRAPPER_PATH, PROFILE, package_server, rs_url)
+
+    return buildcmd
+
+#TODO: 64-bit support for creating kvm image...
+def generate_create_abs_cmd(installer, package_server, sign_data):
+    # build command line
+    BUILD_ROOT = "/home/build"
+    SDK_PATH = os.path.join(BUILD_ROOT, "tizen-sdk-cli")
+    SDK_KEY_PATH = os.path.join(BUILD_ROOT, "tizen-sdk-cli-data", "keystore")
+    SHARE_ROOT = "/share/build"
+
+    buildcmd = '#!/bin/bash\n' \
+        'set -x\n' \
+        'egrep "vmx|svm" /proc/cpuinfo\n' \
+        'LIST_PROFILES=%s\n' \
+        'SHARE_ROOT=%s\nBUILD_ROOT=%s\nSDK_PATH=%s\nSDK_KEY_PATH=%s\n' \
+        'UPDATER="$SDK_PATH/update-manager/update-manager-cli.bin"\n' \
+        'INSTALLER=%s\nPACKAGE_SERVER=%s\n' \
+        'KEY_PATH=%s\nKEY_A=$BUILD_ROOT/$KEY_PATH/%s\nKEY_A_P=%s\n' \
+        'KEY_D=$BUILD_ROOT/$KEY_PATH/%s\nKEY_D_P=%s\n' \
+        'add-apt-repository -y ppa:webupd8team/java\n' \
+        'echo debconf shared/accepted-oracle-license-v1-1 select true | debconf-set-selections\n' \
+        'echo debconf shared/accepted-oracle-license-v1-1 seen true | debconf-set-selections\n' \
+        'apt-get update\n' \
+        'apt-get install -y oracle-java7-installer oracle-java7-set-default\n' \
+        'apt-get install -y zip build-essential gettext libwebkitgtk-1.0-0 libglib2.0-0 libcurl3-gnutls ' \
+        ' libsdl1.2debian libglib2.0-0 acl zlib1g libpixman-1-0 ' \
+        ' bridge-utils openvpn git ruby-full xdg-utils xmlstarlet rpm2cpio\n' \
+        'wget --no-proxy $INSTALLER -O $SHARE_ROOT/`basename $INSTALLER` \n' \
+        'export DISPLAY=:0\nchmod +x $SHARE_ROOT/`basename $INSTALLER`\n' \
+        'sed -i \'/10.112.1.184/d\' /etc/environment\n' \
+        'su - build -c "$SHARE_ROOT/`basename $INSTALLER` --accept-license $SDK_PATH"\n' \
+        'su - build -c "$UPDATER install WebCLI -r $PACKAGE_SERVER -d tizen_3.0 --remove-installed-sdk"\n' \
+        'su - build -c "$UPDATER install MOBILE-3.0-NativeAppDevelopment-CLI --accept-license"\n' \
+        'su - build -c "$UPDATER install WEARABLE-3.0-NativeAppDevelopment-CLI --accept-license"\n' \
+        'su - build -c "echo PATH=\$PATH:$SDK_PATH/tools/ide/bin >> ~/.profile"\n' \
+        'su - build -c "mkdir -p $SDK_KEY_PATH/; touch $SDK_KEY_PATH/profiles.xml"\n' \
+        'cp -rf $SHARE_ROOT/$KEY_PATH $BUILD_ROOT/$KEY_PATH; chown -R build:build $BUILD_ROOT/$KEY_PATH/\n' \
+        'su - build -c "tizen cli-config -g default.profiles.path=\"$SDK_KEY_PATH/profiles.xml\""\n' \
+        'su - build -c "tizen security-profiles add -n ABS -a $KEY_A -p $KEY_A_P -d $KEY_D -dp $KEY_D_P"\n' \
+        'rm -rf $SDK_PATH/tools/smart-build-interface/plugins/{${LIST_PROFILES}}-3.0*.xml\n' \
+        'rm -rf $SDK_PATH/platforms/tizen-3.0/{${LIST_PROFILES}}/rootstraps\n' \
+        'su - build -c "tizen list rootstrap"\n' \
+        % (os.getenv('LIST_PROFILES'), \
+           SHARE_ROOT, BUILD_ROOT, SDK_PATH, SDK_KEY_PATH, \
+           installer, package_server, \
+           sign_data['path'], sign_data['author_key'], sign_data['author_pwd'], \
+           sign_data['distributor_key'], sign_data['distributor_pwd'] \
+           )
+
+    return buildcmd
+
+def prepare_job(vm_image, vm_image_new, version):
+    basedir, builddir = prepare_working_directory(os.getenv('JOB_NAME'))
+    if not vm_image or not os.access(vm_image, os.R_OK):
+        raise LocalError('vm_image %s not found' % vm_image)
+    shutil.copyfile(vm_image, vm_image_new)
+    os.chmod(vm_image_new, os.stat(vm_image_new).st_mode | stat.S_IWRITE)
+    print "copied from " + vm_image + " to " + vm_image_new
+
+    return basedir, builddir
+
+def main_job(vm_image_new, builddir, basedir, buildcmd):
+    with open(os.path.join(builddir, 'run'), 'w') as fcmdl:
+        fcmdl.write('%s' % buildcmd)
+    os.chmod(os.path.join(builddir, 'run'), 0777)
+    print buildcmd
+
+    #### Running QEMU to launch Tizen SDK CLI build ####
+    print "[ s d k - k v m    s t a r t ] %s " % (str(datetime.now()))
+    sys.stdout.flush()
+    ret = run_inside_vm(vm_image_new, basedir, False)
+    print "[ s d k - k v m    f i n i s h ] %s " % (str(datetime.now()))
+
+    if int(ret) != 0:
+        os.remove(vm_image_new)
+        raise LocalError('KVM return failure')
+
+def post_job(vm_image, raw_new_image):
+
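+    # A '-FAIL' suffix on the candidate image marks a failed build check;
+    # such images are deleted instead of being promoted.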
+    vm_image_new = raw_new_image.replace('-FAIL', '')
+    if raw_new_image.endswith('-FAIL'):
+        os.remove(vm_image_new)
+        raise LocalError('Build check failed for %s, DO NOT UPDATE IT!' % (vm_image_new))
+
+    if not os.path.exists(vm_image_new) or not os.path.isfile(vm_image_new):
+        return False
+
+    #### Rename the VM image ####
+    try:
+        os.rename(vm_image_new, vm_image)
+        #os.chmod(vm_image, os.stat(vm_image).st_mode ^ stat.S_IWRITE)
+    except OSError as e:
+        print e.errno
+        print e.strerror
+        os.remove(vm_image_new)
+        raise LocalError('Failed to rename %s to %s' % (vm_image_new, vm_image))
+
+    #### Distribute to workers ####
+    #TODO: list slaves here except current one...
+    return True
+
+def create_abs(contents):
+    """
+    Create ABS VM entry point.
+    """
+
+    print '---[ CREATE ABS ] %s---' % str(datetime.now())
+
+    vm_image = os.getenv('BLANK_VM_IMAGE')
+    vm_image_new = vm_image + ".v-" + contents['version']
+    basedir, builddir = prepare_job(vm_image, vm_image_new, contents['version'])
+
+    #### Download SDK installer
+    sdk_file = contents['sdk_file']
+    #wget_noproxy(os.path.join(contents['sdk_release_url'], sdk_file), \
+    #                          os.path.join(builddir, sdk_file))
+
+    #### Fetch signer ####
+    prepare_sdk_tool(builddir, os.getenv('SDK_SIGN_GIT'))
+    sign_p = os.path.basename(os.getenv('SDK_SIGN_GIT').split(',')[0])
+    sign_data = {'path': sign_p, \
+                 'author_key': os.getenv('SDK_SIGN_AUTHOR').split(',')[0], \
+                 'author_pwd': os.getenv('SDK_SIGN_AUTHOR').split(',')[1], \
+                 'distributor_key': os.getenv('SDK_SIGN_DIST').split(',')[0], \
+                 'distributor_pwd': os.getenv('SDK_SIGN_DIST').split(',')[1]}
+
+    #### Generate script to run(install required packages and SDK) inside VM ####
+    buildcmd = generate_create_abs_cmd(sdk_file, os.getenv('ABS_SDK_PACKAGE_SERVER'), sign_data)
+
+    # Main Routine
+    main_job(vm_image_new, builddir, basedir, buildcmd)
+
+    # Post Routine
+    post_job(os.getenv('ABS_VM_IMAGE'), vm_image_new)
+
+    print "\"Title\": \"ABS_Create/%s\"" % (contents['version'])
+    print '---[ CREATE ABS ] %s---' % str(datetime.now())
+
+def update_abs(contents):
+    """
+    Update ABS VM entry point.
+    """
+
+    print '---[ UPDATE ABS ] %s---' % str(datetime.now())
+
+    kvm_root = '/'.join(os.getenv('ABS_VM_IMAGE').split('/')[:-1])
+    vf = os.path.join(kvm_root, 'abs-rootstrap-' + contents['version'])
+
+    vm_image = os.getenv('ABS_VM_IMAGE')
+    identifier = '-'.join(contents['project'].lower().split(':')[:-1])
+    #TODO: HYOKEUN
+    if contents['project'] == 'Tizen:Unified':
+        print 'Set tizen-unified'
+        identifier = 'tizen-unified'
+    vm_image = vm_image.replace('REL_VER', identifier)
+    vm_image_new = vm_image + ".v-" + contents['version']
+
+    # Stage 2 of 2
+    if post_job(vm_image, vm_image_new) == True:
+        # Refresh version file
+        for _file in os.listdir(kvm_root):
+            if _file.startswith('abs-rootstrap-' + contents['version'].split('_')[0]):
+                os.remove(os.path.join(kvm_root, _file))
+        subprocess.call('touch {}'.format(vf), shell=True)
+        print "\"Title\": \"ABS_Update/%s\"" % (contents['version'])
+        return
+
+    # check pre-installed version
+    #if os.path.exists(vf):
+    #    print 'You already installed %s' % contents['version']
+    #    return
+
+    # Stage 1 of 2
+    for _file in os.listdir(kvm_root):
+        if _file.startswith(os.path.basename(vm_image) + '.v-' + contents['version'].split('_')[0]):
+            print 'Another instance already running... %s' % _file
+            #os.remove(os.path.join(kvm_root, _file))
+    basedir, builddir = prepare_job(vm_image, vm_image_new, contents['version'])
+
+    #### Fetch sdk-rootstrap ####
+    #rs_device, rs_emulator = wget_sdk_rootstrap(contents['rs_url'], builddir)
+
+    #### Fetch sdk-tool ####
+    sdk_tool_path = prepare_sdk_tool(builddir, os.getenv('SDK_TOOL_GIT'))
+
+    #### Prepare local sdk cli wrapper script ####
+    cli_wrapper_path = prepare_sdk_tool(builddir, os.getenv('ABS_CLI_SCRIPT_GIT'))
+
+    #### Generate script to run(building app and make .tpk) inside VM ####
+    buildcmd = generate_update_abs_cmd(contents['version'], \
+                                       contents['rs_url'], \
+                                       os.getenv('ABS_SDK_PACKAGE_SERVER'), \
+                                       sdk_tool_path, cli_wrapper_path)
+
+    # Main Routine
+    main_job(vm_image_new, builddir, basedir, buildcmd)
+
+    #### Request build all packages with new rootstrap
+    trigger_data = {'project': contents['project'],
+                   'version': contents['version']}
+    trigger_next("full_build_request_%s_%s" \
+                 % (contents['project'], contents['version']), trigger_data)
+
+    print '---[ UPDATE ABS ] %s---' % str(datetime.now())
+
+def update_none(args):
+    raise LocalError('Invalid parameters')
+
+def main(argv):
+    """
+    Script entry point.
+    """
+
+    print '---[JOB STARTED]-------------------------'
+
+    options = {'sdk_rootstrap_updated': update_abs,
+               'sdk_released': create_abs,
+               'default': update_none}
+
+    if os.getenv("TRIGGER_INFO"):
+        print "TRIGGER_INFO exist, trigger builder script"
+        fields = trigger_info(os.getenv("TRIGGER_INFO"))
+        return options.get(fields['title'], update_none)(fields['contents'])
+    else:
+        print "****************TEST*****************"
+        test_data = ast.literal_eval(os.getenv("TRIGGER_TEST"))
+        for key in test_data.keys():
+            print "%s='%s'" % (key, test_data[key])
+        return options.get(test_data['title'], update_none)(test_data['contents'])
+
+if __name__ == '__main__':
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except LocalError, err:
+        print err
+        sys.exit(1)
+
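
For local experimentation, main() above can be driven without a Jenkins trigger by exporting TRIGGER_TEST with the same {'title': ..., 'contents': ...} shape that trigger_info() decodes. A minimal sketch — the project, version and rootstrap URL are illustrative values, not real ones:

    import ast, os

    # Payload mirrors what the 'sdk_rootstrap_updated' path in update_abs() expects.
    test = {'title': 'sdk_rootstrap_updated',
            'contents': {'project': 'Tizen:Unified',            # illustrative
                         'version': '20170601.1',               # illustrative
                         'rs_url': 'http://rs.example.com/'}}   # illustrative
    os.environ['TRIGGER_TEST'] = repr(test)  # main() parses it with ast.literal_eval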
diff --git a/abs/report_template b/abs/report_template
new file mode 100644 (file)
index 0000000..414bf56
--- /dev/null
@@ -0,0 +1,479 @@
+<html>
+<head>
+  <title></title>
+  <!--<script src="./jquery-2.2.4.min.js"></script>-->
+  <script src="https://code.jquery.com/jquery-2.2.4.min.js"></script>
+  <!--<script src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.1.4/Chart.min.js"></script>-->
+  <script src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.3.0/Chart.min.js"></script>
+  <script src="http://code.jquery.com/ui/1.12.0/jquery-ui.min.js"></script>
+  <link rel="stylesheet" href="http://code.jquery.com/ui/1.12.0/themes/smoothness/jquery-ui.css" type="text/css" />
+  <style>
+    .container {
+      width: 95%;
+      margin: 10px auto;
+    }
+</style>
+</head>
+
+<body>
+
+  <div class="container">
+    <h5>
+    <div><form>
+      Date : <input type="text" name="dateFrom" id="dateFrom" /> ~ <input type="text" name="dateTo" id="dateTo" />
+    </form></div>
+    <div><form>Package : <input id="TextPackage"></form></div>
+    <!--
+    <div><form>Package : <select id="SelectPackage"><option value="All">All</option></select></form></div>
+    -->
+    <div><form>Profile : <select id="SelectProfile"><option value="All">All</option></select></form></div>
+    <div><form>Branch : <select id="SelectBranch"><option value="All">All</option></select></form></div>
+    </h5>
+    <h3>Success Ratio</h3>
+    <div id="divSuccessRatio">
+      <canvas id="SuccessRatio" height="150"></canvas>
+    </div>
+    <h3>Build Requests</h3>
+    <div id="divBuildCount">
+      <canvas id="BuildCount"></canvas>
+    </div>
+    <h3>Git Repository Size Growth (KB)</h3>
+    <div id="divGitRepoSize">
+      <canvas id="GitRepoSize"></canvas>
+    </div>
+  </div>
+
+  <script type="text/javascript">
+    var raw_data = RAW_DATA_FROM_BACKEND;
+  </script>
+
+  <script type="text/javascript">
+    dateStart = null; dateEnd = null; setPackage = null; setProfile = null; setBranch = null;
+    $( function() {
+      $( "#dateFrom" ).datepicker({
+        changeMonth: true, changeYear: true, dateFormat: "yymmdd",
+        onSelect: function (dateText, inst) { dateStart = dateText; reloadPage(); }
+      });
+    });
+    $( function() {
+      $( "#dateTo" ).datepicker({
+        changeMonth: true, changeYear: true, dateFormat: "yymmdd",
+        onSelect: function (dateText, inst) { dateEnd = dateText; reloadPage(); }
+      });
+    });
+    /*
+    $( "#SelectPackage" ).change(function() {
+      $( "#SelectPackage option:selected" ).each(function() {
+        setPackage = $( this ).text(); reloadPage();
+      });
+    });
+    */
+    $( "#SelectProfile" ).change(function() {
+      $( "#SelectProfile option:selected" ).each(function() {
+        setProfile = $( this ).text(); reloadPage();
+      });
+    });
+    $( "#SelectBranch" ).change(function() {
+      $( "#SelectBranch option:selected" ).each(function() {
+        setBranch = $( this ).text(); reloadPage();
+      });
+    });
+    var availablePackages = [];
+    $("#TextPackage").autocomplete({ 
+      source: availablePackages,
+      change: function() {
+        setPackage = $(this).val(); reloadPage();
+      }
+    });
+    var BuildCountCtx;
+    var GitRepoSizeCtx;
+    var sizeData;
+    function eventClickCount(evt) {
+      setPackage = BuildCountCtx.getElementAtEvent(evt)[0]._model.datasetLabel;
+      $("#TextPackage").val(setPackage);
+      reloadPage();
+    }
+    function eventClickGit(evt) {
+      _dIdx = GitRepoSizeCtx.getElementAtEvent(evt)[0]._datasetIndex;
+      _idx = GitRepoSizeCtx.getElementAtEvent(evt)[0]._index;
+      setPackage = sizeData.datasets[_dIdx].label;
+      $("#TextPackage").val(setPackage);
+      reloadPage();
+    }
+    
+    Colors = [
+      {_hash: null, _color: "#006400", _name: "dark green"},
+      {_hash: null, _color: "#FF4500", _name: "orange red"},
+      {_hash: null, _color: "#2F4F4F", _name: "dark slate gray"},
+      {_hash: null, _color: "#4682B4", _name: "steel blue"},
+      {_hash: null, _color: "#00008B", _name: "dark blue"},
+      {_hash: null, _color: "#8B008B", _name: "dark magenta"},
+      {_hash: null, _color: "#FF00FF", _name: "magenta / fuchsia"},
+      {_hash: null, _color: "#8B4513", _name: "saddle brown"},
+      {_hash: null, _color: "#000000", _name: "black"},
+      {_hash: null, _color: "#708090", _name: "slate gray"},
+      {_hash: null, _color: "#00CED1", _name: "dark turquoise"},
+      {_hash: null, _color: "#228B22", _name: "forest green"},
+      {_hash: null, _color: "#FFD700", _name: "gold"},
+      {_hash: null, _color: "#FF0000", _name: "red"},
+      {_hash: null, _color: "#FFFF00", _name: "yellow"},
+      {_hash: null, _color: "#800000", _name: "maroon"},
+      {_hash: null, _color: "#A0522D", _name: "sienna"},
+      {_hash: null, _color: "#2E8B57", _name: "sea green"},
+      {_hash: null, _color: "#8A2BE2", _name: "blue violet"},
+      {_hash: null, _color: "#00FFFF", _name: "aqua"},
+      {_hash: null, _color: "#C71585", _name: "medium violet red"},
+      {_hash: null, _color: "#0000FF", _name: "blue"},
+      {_hash: null, _color: "#32CD32", _name: "lime green"},
+      {_hash: null, _color: "#B8860B", _name: "dark golden rod"},
+      {_hash: null, _color: "#A52A2A", _name: "brown"},
+      {_hash: null, _color: "#3CB371", _name: "medium sea green"},
+      {_hash: null, _color: "#1E90FF", _name: "dodger blue"},
+      {_hash: null, _color: "#9932CC", _name: "dark orchid"},
+      {_hash: null, _color: "#D2691E", _name: "chocolate"},
+      {_hash: null, _color: "#696969", _name: "dim gray / dim grey"},
+      {_hash: null, _color: "#4169E1", _name: "royal blue"},
+      {_hash: null, _color: "#008B8B", _name: "dark cyan"},
+      {_hash: null, _color: "#FF6347", _name: "tomato"},
+      {_hash: null, _color: "#808000", _name: "olive"},
+      {_hash: null, _color: "#00FF00", _name: "lime"},
+      {_hash: null, _color: "#6A5ACD", _name: "slate blue"},
+      {_hash: null, _color: "#BC8F8F", _name: "rosy brown"},
+      {_hash: null, _color: "#7CFC00", _name: "lawn green"},
+      {_hash: null, _color: "#556B2F", _name: "dark olive green"},
+      {_hash: null, _color: "#0000CD", _name: "medium blue"},
+      {_hash: null, _color: "#9400D3", _name: "dark violet"},
+      {_hash: null, _color: "#DC143C", _name: "crimson"},
+      {_hash: null, _color: "#FFA500", _name: "orange"},
+      {_hash: null, _color: "#008000", _name: "green"},
+      {_hash: null, _color: "#6495ED", _name: "corn flower blue"},
+      {_hash: null, _color: "#EE82EE", _name: "violet"},
+      {_hash: null, _color: "#778899", _name: "light slate gray"},
+      {_hash: null, _color: "#20B2AA", _name: "light sea green"},
+      {_hash: null, _color: "#BDB76B", _name: "dark khaki"},
+      {_hash: null, _color: "#CD5C5C", _name: "indian red"},
+      {_hash: null, _color: "#FF8C00", _name: "dark orange"},
+      {_hash: null, _color: "#9ACD32", _name: "yellow green"},
+      {_hash: null, _color: "#8FBC8F", _name: "dark sea green"},
+      {_hash: null, _color: "#4B0082", _name: "indigo"},
+      {_hash: null, _color: "#FF1493", _name: "deep pink"},
+      {_hash: null, _color: "#CD853F", _name: "peru"},
+      {_hash: null, _color: "#B0C4DE", _name: "light steel blue"},
+      {_hash: null, _color: "#DA70D6", _name: "orchid"},
+      {_hash: null, _color: "#808080", _name: "gray / grey"},
+      {_hash: null, _color: "#DEB887", _name: "burly wood"},
+      {_hash: null, _color: "#FFB6C1", _name: "light pink"},
+      {_hash: null, _color: "#7B68EE", _name: "medium slate blue"},
+      {_hash: null, _color: "#87CEEB", _name: "sky blue"},
+      {_hash: null, _color: "#008080", _name: "teal"},
+      {_hash: null, _color: "#7FFF00", _name: "chart reuse"},
+      {_hash: null, _color: "#FF7F50", _name: "coral"},
+      {_hash: null, _color: "#00BFFF", _name: "deep sky blue"},
+      {_hash: null, _color: "#BA55D3", _name: "medium orchid"},
+      {_hash: null, _color: "#A9A9A9", _name: "dark gray / dark grey"},
+      {_hash: null, _color: "#FFDAB9", _name: "peach puff"},
+      {_hash: null, _color: "#DB7093", _name: "pale violet red"},
+      {_hash: null, _color: "#191970", _name: "midnight blue"},
+      {_hash: null, _color: "#40E0D0", _name: "turquoise"},
+      {_hash: null, _color: "#ADFF2F", _name: "green yellow"},
+      {_hash: null, _color: "#DAA520", _name: "golden rod"},
+      {_hash: null, _color: "#F08080", _name: "light coral"},
+      {_hash: null, _color: "#8B0000", _name: "dark red"},
+      {_hash: null, _color: "#90EE90", _name: "light green"},
+      {_hash: null, _color: "#483D8B", _name: "dark slate blue"},
+      {_hash: null, _color: "#DDA0DD", _name: "plum"},
+      {_hash: null, _color: "#F4A460", _name: "sandy brown"},
+      {_hash: null, _color: "#C0C0C0", _name: "silver"},
+      {_hash: null, _color: "#FFC0CB", _name: "pink"},
+      {_hash: null, _color: "#48D1CC", _name: "medium turquoise"},
+      {_hash: null, _color: "#F0E68C", _name: "khaki"},
+      {_hash: null, _color: "#FA8072", _name: "salmon"},
+      {_hash: null, _color: "#00FF7F", _name: "spring green"},
+      {_hash: null, _color: "#000080", _name: "navy"},
+      {_hash: null, _color: "#FF69B4", _name: "hot pink"},
+      {_hash: null, _color: "#5F9EA0", _name: "cadet blue"},
+      {_hash: null, _color: "#6B8E23", _name: "olive drab"},
+      {_hash: null, _color: "#EEE8AA", _name: "pale golden rod"},
+      {_hash: null, _color: "#9370DB", _name: "medium purple"},
+      {_hash: null, _color: "#B0E0E6", _name: "powder blue"},
+      {_hash: null, _color: "#800080", _name: "purple"},
+      {_hash: null, _color: "#F5DEB3", _name: "wheat"},
+      {_hash: null, _color: "#D8BFD8", _name: "thistle"},
+      {_hash: null, _color: "#00FA9A", _name: "medium spring green"},
+      {_hash: null, _color: "#87CEFA", _name: "light sky blue"},
+      {_hash: null, _color: "#B22222", _name: "firebrick"},
+    ];
+    function hexToRgb(hex, a) {
+        // Expand shorthand form (e.g. "03F") to full form (e.g. "0033FF")
+        var shorthandRegex = /^#?([a-f\d])([a-f\d])([a-f\d])$/i;
+        hex = hex.replace(shorthandRegex, function(m, r, g, b) {
+            return r + r + g + g + b + b;
+        });
+        var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
+        return result ? "rgba(" 
+            + parseInt(result[1], 16) + ','
+            + parseInt(result[2], 16) + ','
+            + parseInt(result[3], 16) + ','
+            + a + ")"
+        : null;
+    }
+    function getColor(_package, a) {
+      for (var i=0; i<Colors.length; i++) {
+        if (Colors[i]._hash == _package) { return hexToRgb(Colors[i]._color, a); }
+        if (Colors[i]._hash == null) { 
+          Colors[i]._hash = _package; 
+          $('#SelectPackage').append($('<option>', { value : _package }).text(_package)); 
+          availablePackages.push(_package);
+          return hexToRgb(Colors[i]._color, a); 
+        }
+      }
+    }
+    
+    var one_time_init_done = false;
+    var filteredData = [];
+    function filterData(_from, _to, _package, _profile, _branch) {
+      filteredData = [];
+      for (var i=0; i<raw_data.length; i++) {
+        if (one_time_init_done == false) {
+          var __prof = raw_data[i].package.split('/')[1];
+          var __bran = raw_data[i].tag.split('/')[3];
+          $('#SelectProfile').append($('<option>', { value : __prof }).text(__prof)); 
+          $('#SelectBranch').append($('<option>', { value : __bran }).text(__bran)); 
+        }
+        if (_package != null && raw_data[i].package != _package) { continue; }
+        if (_profile != null && raw_data[i].package.indexOf('/'+_profile+'/') == -1) { continue; }
+        if (_branch != null && raw_data[i].tag.indexOf('/'+_branch+'/') == -1) { continue; }
+        if (_to != null && raw_data[i].date.substring(0,8) > _to) { continue; }
+        if (_from != null && raw_data[i].date.substring(0,8) < _from) { continue; }
+        filteredData.push(raw_data[i]);
+      }
+      if (one_time_init_done == false) { 
+        one_time_init_done = true; 
+        var found = [];
+        $("#SelectProfile option").each(function() {
+          if($.inArray(this.value, found) != -1) $(this).remove();
+          found.push(this.value);
+        });
+        found2 = [];
+        $("#SelectBranch option").each(function() {
+          if($.inArray(this.value, found2) != -1) $(this).remove();
+          found2.push(this.value);
+        });
+      }
+    }
+  </script>
+  
+  <!-- SUCCESS RATIO -->
+  <script type="text/javascript">
+    function createSuccessRatioChart() {
+      document.getElementById("divSuccessRatio").innerHTML = '<canvas id="SuccessRatio" height="150"></canvas>';
+      var successData = { labels : [],  datasets : [] };
+      // Find labels
+      for (var i=0; i<filteredData.length; i++) {
+        if (filteredData[i].result != "SUCCESS") { continue; }
+        if ($.inArray(filteredData[i].date.substring(0,8), successData.labels) === -1) {
+          successData.labels.push(filteredData[i].date.substring(0,8));
+        }
+      }
+      successData.labels.sort();
+      // Initial values
+      successData.datasets.push({label: 'SUCCESS',
+                                data: new Array(successData.labels.length).fill(0),
+                                backgroundColor: "rgba(75, 192, 192, 1)"
+                              });
+      successData.datasets.push({label: 'FAILURE',
+                                data: new Array(successData.labels.length).fill(0),
+                                backgroundColor: "rgba(255,99,132,1)"
+                              });
+      // Fill data
+      for (var j=0; j<filteredData.length; j++) {
+        data_index = successData.labels.indexOf(filteredData[j].date.substring(0,8));
+        if (filteredData[j].result == "SUCCESS") {
+          successData.datasets[0].data[data_index] += 1;
+        }
+        else {
+          successData.datasets[1].data[data_index] += 1;
+        }
+      }
+      
+      var ctx = document.getElementById('SuccessRatio').getContext('2d');
+      SuccessRatioCtx = new Chart(ctx, {
+        type: 'line', data: successData,
+        options: {
+          maintainAspectRatio: false,
+          legend: { display: false },
+          scales: {
+            xAxes: [{
+                stacked: true
+            }],
+            yAxes: [{
+                stacked: true
+            }]
+          },
+          //onClick: eventClickSuccess,
+        }
+      });
+    }
+  </script>
+
+  <!-- BUILD COUNT -->
+  <script type="text/javascript">
+    function createBuildCountChart() {
+      document.getElementById("divBuildCount").innerHTML = '<canvas id="BuildCount"></canvas>';
+      var countData = { labels : [],  datasets : [] };
+      // Find labels
+      for (var i=0; i<filteredData.length; i++) {
+        if (filteredData[i].result != "SUCCESS") { continue; }
+        if ($.inArray(filteredData[i].date.substring(0,8), countData.labels) === -1) {
+          countData.labels.push(filteredData[i].date.substring(0,8));
+        }
+      }
+      countData.labels.sort();
+      // Initial values
+      for (var i=0; i<filteredData.length; i++) {
+        if (filteredData[i].result != 'SUCCESS') { continue; }
+        found = false;
+        for (var j=0; j<countData.datasets.length; j++) {
+          if (countData.datasets[j].label == filteredData[i].package) { found = true; break; }
+        }
+        if (found == false) {
+          countData.datasets.push({label: filteredData[i].package,
+                                  data: new Array(countData.labels.length).fill(0),
+                                  pass: new Array(countData.labels.length).fill(0),
+                                  fail: new Array(countData.labels.length).fill(0),
+                                  backgroundColor: getColor(filteredData[i].package, 0.1),
+                                  borderWidth: 1,
+                                  borderColor: getColor(filteredData[i].package, 1)
+                                 });
+        }
+      }
+      // Fill data
+      for (var i=0; i<countData.datasets.length; i++) {
+        for (var j=0; j<filteredData.length; j++) {
+          if (filteredData[j].package != countData.datasets[i].label) { continue; }
+          data_index = countData.labels.indexOf(filteredData[j].date.substring(0,8));
+          countData.datasets[i].data[data_index] += 1;
+          if (filteredData[j].result == 'SUCCESS') {
+            countData.datasets[i].pass[data_index] += 1;
+          } else {
+            countData.datasets[i].fail[data_index] += 1;
+          }
+        }
+      }
+      
+      var ctx = document.getElementById('BuildCount').getContext('2d');
+      BuildCountCtx = new Chart(ctx, {
+        type: 'bar', data: countData,
+        options: {
+          legend: { display: false },
+          scales: {
+            xAxes: [{
+                stacked: true
+            }],
+            yAxes: [{
+                stacked: true
+            }]
+          },
+          tooltips: { enabled: true, mode: 'single',
+            callbacks: {
+              title: function(tooltipItems, data) {
+                return (data.datasets[tooltipItems[0].datasetIndex].label);
+              },
+              label: function(tooltipItems, data) {
+                return "PASS: " + data.datasets[tooltipItems.datasetIndex].pass[tooltipItems.index]
+                       + ", FAIL: " + data.datasets[tooltipItems.datasetIndex].fail[tooltipItems.index];
+              }
+            },
+          },
+          onClick: eventClickCount,
+        }
+      });
+    }
+    
+  </script>
+
+  <!-- GIT REPOSITORY SIZE -->
+  <script type="text/javascript">
+    function createGitSizeChart() {
+      document.getElementById("divGitRepoSize").innerHTML = '<canvas id="GitRepoSize"></canvas>';
+      sizeData = { labels : [],  datasets : [] };
+      // Find labels
+      for (var i=0; i<filteredData.length; i++) {
+        if (filteredData[i].result != "SUCCESS" || filteredData[i].sizein == "0") { continue; }
+        if ($.inArray(filteredData[i].date.substring(0,8), sizeData.labels) === -1) {
+          sizeData.labels.push(filteredData[i].date.substring(0,8));
+        }
+      }
+      sizeData.labels.sort();
+      // Initial values
+      for (var i=0; i<filteredData.length; i++) {
+        if (filteredData[i].result != 'SUCCESS' || filteredData[i].sizein == '0') { continue; }
+        found = false;
+        for (var j=0; j<sizeData.datasets.length; j++) {
+          if (sizeData.datasets[j].label == filteredData[i].package) { found = true; break; }
+        }
+        if (found == false) {
+          sizeData.datasets.push({label: filteredData[i].package,
+                                  data: new Array(sizeData.labels.length),
+                                  backgroundColor: getColor(filteredData[i].package, 0.1),
+                                  borderWidth: 1,
+                                  borderColor: getColor(filteredData[i].package, 1),
+                                  pointRadius: 2,
+                                  fill: false,
+                                  spanGaps: true,
+                                 });
+        }
+      }
+      // Fill data
+      for (var i=0; i<sizeData.datasets.length; i++) {
+        for (var j=0; j<filteredData.length; j++) {
+          if (filteredData[j].package != sizeData.datasets[i].label) { continue; }
+          if (filteredData[j].sizein == '0') { continue; }
+          data_index = sizeData.labels.indexOf(filteredData[j].date.substring(0,8));
+          if (sizeData.datasets[i].data[data_index] == null) {
+            sizeData.datasets[i].data[data_index] = filteredData[j].sizein;
+          }
+        }
+      }
+      
+      var ctx = document.getElementById('GitRepoSize').getContext('2d');
+      GitRepoSizeCtx = new Chart(ctx, {
+        type: 'line', data: sizeData,
+        options: {
+          legend: { display: false },
+          tooltips: { enabled: true, mode: 'single',
+            callbacks: {
+              title: function(tooltipItems, data) {
+                return (data.datasets[tooltipItems[0].datasetIndex].label);
+              },
+              label: function(tooltipItems, data) {
+                return (tooltipItems.yLabel / 1024).toFixed(2) + " MB";
+              }
+            },
+          },
+          onClick: eventClickGit,
+        }
+      });
+    }
+  </script>
+  
+  <script type="text/javascript">
+  </script>
+  
+  <script type="text/javascript">
+    function reloadPage() {
+      if (setPackage == 'All') { setPackage = null; }
+      if (setProfile == 'All') { setProfile = null; }
+      if (setBranch == 'All') { setBranch = null; }
+      console.log(setPackage, setProfile, setBranch);
+      filterData(dateStart, dateEnd, setPackage, setProfile, setBranch);
+      createGitSizeChart();
+      createBuildCountChart();
+      createSuccessRatioChart();
+    }
+    reloadPage();
+  </script>
+  
+  </body>
+</html>
+
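
The RAW_DATA_FROM_BACKEND literal above is a placeholder the reporting backend is expected to replace with a JSON array before publishing the page. A minimal substitution sketch, assuming record fields matching the filters used in the template (package, tag, date, result, sizein; all values illustrative):

    import json

    records = [{'package': 'apps/mobile/org.example.app',     # illustrative
                'tag': 'submit/tizen/3.0/20170601.000000',
                'date': '20170601.120000',
                'result': 'SUCCESS',
                'sizein': '1024'}]
    with open('abs/report_template') as f:
        html = f.read().replace('RAW_DATA_FROM_BACKEND', json.dumps(records))
    with open('report.html', 'w') as f:
        f.write(html)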
index 2a1862a..d204f12 100755 (executable)
@@ -634,17 +634,18 @@ class BuildService(OSC):
         if 'packages' in info:
             saved_info['packages'] = list(set(packages + info['packages']))
         if 'submitter' in info:
-            if not info['submitter'] in submitter:
-                saved_info['submitter'] = (submitter + ',' + info['submitter'])
-            else:
-                saved_info['submitter'] = submitter
+            saved_info['submitter'] = ','.join(list(set(submitter.split(',') \
+                                      + info['submitter'].split(','))))
         if 'images' in info:
             if info['images']:
                 # remove the old one if already exist
                 for image in images:
-                    if info['images'][0]['name'] == image['name'] and \
-                       info['images'][0]['repo'] == image['repo']:
-                        images.remove(image)
+                    if info['images'][0]['name'] == image['name']:
+                        if 'repo' in info['images'][0] and 'repo' in image:
+                            if info['images'][0]['repo'] == image['repo']:
+                                images.remove(image)
+                        else:
+                            images.remove(image)
 
                 saved_info['images'] = images + info['images']
             else:
@@ -1289,7 +1290,7 @@ class BuildService(OSC):
             print 'get_pkgrev_from_snapshot http_GET(%s) error' % u
             return None
 
-    def get_source_viewinfo(self, prj, nofilename=1, parse=0):
+    def get_source_viewinfo(self, prj, parse=0, nofilename=1):
         """
         Get source viewinfo of the project
         """
index cc235d8..231f88f 100644 (file)
 #    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
 """A build trigger"""
 
+import os
+import sys
 import base64
 import json
+from time import sleep, strftime, gmtime
+from jenkinsapi.jenkins import Jenkins
+import urllib3
 
 from common.utils import unicode_to_str
+# remote jenkins build job
+import requests
 
-def trigger_next(job_name, data):
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+class JenkinsError(Exception):
+    """Local error exception."""
+    pass
+
+def trigger_next(job_name, data, show=True, extra_params={}):
     """Trigger next job"""
 
     print "====LOG:TRIGGER_NEXT:%s =======================" % job_name.upper()
     with open("%s.env" % job_name, 'w') as info_file:
         info_file.write('TRIGGER_INFO=%s\n' % \
                 base64.b64encode(json.dumps(data)))
+        for k in extra_params:
+            v = extra_params[k]
+            info_file.write('%s=%s\n' % (k, v))
     for key in data.keys():
         print "%s='%s'" % (key, data[key])
+    for k in extra_params:
+        v = extra_params[k]
+        print "%s='%s'" % (k, v)
     print "====LOG:TRIGGER_NEXT:======================end=="
 
 def trigger_info(_trigger_info):
@@ -38,3 +60,135 @@ def trigger_info(_trigger_info):
     print json.dumps(content_dict, indent=4)
     print "====LOG:TRIGGER_INFO:======================end=="
     return content_dict
+
+def get_jenkins_instance(job, cred=None):
+
+    if cred is None:
+        cred = {'url': os.getenv('JENKINS_URL'), \
+                'username': os.getenv('JENKINS_USER'), \
+                'password': os.getenv('JENKINS_PASS')}
+
+    try:
+        jenkins = Jenkins(cred['url'], cred['username'], cred['password'])
+    except Exception, error:
+        raise JenkinsError("Can't connect to jenkins: %s" % str(error))
+
+    if job not in jenkins:
+        raise JenkinsError("Job %s doesn't exist" % job)
+
+    return jenkins
+
+def trigger_jenkins_build(job, parameters, cred=None, block=False):
+
+    jenkins = get_jenkins_instance(job, cred)
+
+    qitem = jenkins[job].invoke(block=block, build_params=parameters)
+
+    if block:
+        build = qitem.get_build()
+        return build.get_number(), build.get_status(), build.get_console()
+    else:
+        return qitem.queue_id
+
+def monitor_jenkins_build_with_queue_id(job, queue_id=None, cred=None):
+
+    jenkins = get_jenkins_instance(job, cred)
+
+    ret_val = {'queue_id': queue_id,
+               'build_number': None,
+               'building': True,
+               'result': None,
+               'returns': None}
+
+    # Check queue first
+    if queue_id:
+        for pending_build in jenkins.get_queue().get_queue_items_for_job(job):
+            if pending_build.queue_id == queue_id:
+                return ret_val
+
+    # Check build
+    j = jenkins.get_job(job)
+    builds = j.poll(tree='builds[number,building,queueId,result,actions[parameters[name,value]]]')
+    for build in builds['builds']:
+        if (queue_id and str(queue_id) == str(build['queueId'])):
+            ret_val['build_number'] = build['number']
+            if build['building']:
+                return ret_val
+            else:
+                ret_val['building'] = False
+                ret_val['result'] = build['result']
+                # Get PBS_RETURN_VALUE
+                for action in build['actions']:
+                    if 'parameters' not in action:
+                        continue
+                    for parameter in action['parameters']:
+                        if 'name' in parameter and parameter['name'] == 'PBS_RETURN_VALUE':
+                            ret_val['returns'] = parameter['value']
+                return ret_val
+
+    return ret_val
+
+def monitor_jenkins_build(job, queue_id=None, cred=None, retry_seconds=10, get_return=False):
+
+    while True:
+        sys.stdout.flush()
+        sleep(retry_seconds)
+        ret = monitor_jenkins_build_with_queue_id(job, queue_id=queue_id, cred=cred)
+        if ret['building']:
+            continue
+        if get_return == True:
+            return ret['result'], ret['returns']
+        else:
+            return ret['result']
+
+def get_jenkins_build_data(job=None, build_num=None, cred=None, tree=None):
+
+    jenkins = get_jenkins_instance(job, cred)
+
+    j = jenkins.get_job(job)
+    if tree is None:
+        tree = ['number', 'building', 'queueId', 'result', 'timestamp', 'duration', 'actions[text,parameters[name,value]]']
+    builds = j.poll(tree='builds[%s]' % ','.join(tree))
+
+    for build in builds['builds']:
+        if build_num and build_num == str(build['number']):
+            return build
+        elif build_num is None:
+            return builds['builds']
+
+def remote_jenkins_build_job(url, username, password, jobname, token=None, data=None, files=None):
+    """Trigger a build job on a remote Jenkins."""
+    print 'remote jenkins build job'
+    if url and username and password and jobname:
+        if token:
+            url = '%s/job/%s/buildWithParameters?token=%s' \
+                  %(url, jobname, token)
+        else:
+            url = '%s/job/%s/buildWithParameters?' \
+                  %(url, jobname)
+        try:
+
+            if files:
+                file_formdata_arr = []
+                for (key, filename) in files:
+                    file_formdata_arr.append((key, (filename, open(filename,'r').read())))
+
+                filedata, content_type = urllib3.encode_multipart_formdata(file_formdata_arr)
+
+                resp = requests.post(url, params=data, data=filedata,
+                                    auth=(username, password),
+                                    headers={"content-type":content_type}
+                                    )
+            else:
+                resp = requests.post(url, params=data,
+                                    auth=(username, password),
+                                    )
+            status_code = resp.status_code
+            print status_code
+        except requests.exceptions.Timeout as e:
+            print(e)
+        except requests.exceptions.ConnectionError as e:
+            print(e)
+        except Exception as e:
+            raise Exception(e)
+
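
Together, the new helpers let one job fire a parameterized downstream build and poll it to completion by queue id. A sketch, assuming credentials fall back to the JENKINS_URL/JENKINS_USER/JENKINS_PASS environment as in get_jenkins_instance(); the job name and parameter are illustrative:

    from common.buildtrigger import trigger_jenkins_build, monitor_jenkins_build

    # block=False makes invoke() return a queue id instead of waiting.
    queue_id = trigger_jenkins_build('ABS_BUILD', {'PROJECT': 'Tizen:Unified'})
    result, returns = monitor_jenkins_build('ABS_BUILD', queue_id=queue_id,
                                            retry_seconds=30, get_return=True)
    print result, returns  # e.g. 'SUCCESS' plus PBS_RETURN_VALUE, if the build set one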
index 57a3954..bf9d3aa 100644 (file)
@@ -20,7 +20,7 @@
 Git module inherited from GitRepository and self customed
 """
 
-import os
+import os, sys
 import shutil
 
 from common import runner
@@ -29,7 +29,7 @@ from common.utils import retry
 from gbp.git.repository import GitRepository
 from gbp.git.repository import GitRepositoryError
 import gbp.log as log
-
+from gbp.git.args import GitArgs
 
 class GitError(Exception):
     """Local exception."""
@@ -100,13 +100,26 @@ def _update_gitproject(localdir, gitpath=None):
     try:
         localgit = Git(localdir)
 
-        if gitpath and ((not localgit.get_remote_repos()) \
-                or localgit.get_remote_repos().get('origin')[0] != gitpath):
+        repo_fail = True
+        if localgit.get_remote_repos():
+            repo_origin = localgit.get_remote_repos().get('origin')[0]
+            if repo_origin.endswith('.git'):
+                repo_origin = repo_origin[:-4]
+            if repo_origin == gitpath:
+                repo_fail = False
+        if gitpath and repo_fail:
             shutil.rmtree(localdir)
             return False
 
+        print 'Local git repo found, trying to update it'
+        sys.stdout.flush()
         if localgit.bare:
-            localgit.fetch(all_remotes=True)
+            localgit.fetch(tags=True, all_remotes=True)
+            # Current gbp does not support the --prune option; call git directly.
+            args = GitArgs('--quiet')
+            args.add_true(True, '--prune')
+            args.add_true(True, '--all')
+            localgit._git_command('fetch', args.args)
         else:
             localgit.fetch(tags=True)
             try:
@@ -138,6 +151,8 @@ def _clone_gitproject(giturl, gerritprj, localdir, bare=False, git_cache_dir=Non
         else:
             cache_dir = os.path.join(git_cache_dir, gerritprj) + '.git'
 
+        print 'No local repo, or updating it failed; cloning using cache dir %s' % cache_dir
+        sys.stdout.flush()
         if os.path.isdir(cache_dir):
             # use local cache repo as reference to clone
             gitcmd = 'git clone %s/%s --reference %s %s %s' % \
@@ -148,6 +163,8 @@ def _clone_gitproject(giturl, gerritprj, localdir, bare=False, git_cache_dir=Non
                    (giturl, gerritprj, localdir,
                     '--mirror' if bare else '')
 
+        print 'Cloning it with gitcmd: %s' % gitcmd
+        sys.stdout.flush()
         if runner.show(gitcmd)[0] != 0:
             result = False
     except (TypeError, AttributeError, OSError), err:
@@ -190,4 +207,3 @@ def fetch_change(gerritprj, localdir, refspec, giturl=None, bare=False, gerrit_h
     git = Git.create(localdir, bare)
     git.fetch(repo=giturl, refspec=refspec)
     git.checkout('FETCH_HEAD')
-
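
The direct _git_command() call above compensates for the bundled gbp lacking --prune; its effect on the bare cache is the same as running git fetch --quiet --prune --all inside it. An equivalent sketch using the repo's own runner helper, assuming localdir points at the bare cache repository:

    from common import runner

    runner.show('git --git-dir=%s fetch --quiet --prune --all' % localdir)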
index f0d10fa..4fc1d3b 100644 (file)
@@ -82,6 +82,45 @@ class Prerelease(object):
                                           'packages')
         return pkg_urls
 
+class Trbs(object):
+    """Handle trbs meta data."""
+
+    def __init__(self, snapshot, base_url, tstamp, buildid):
+        self.snapshot = snapshot
+        self.base_url = base_url
+        self.tstamp = tstamp
+        self.buildid = buildid
+        self.snap_buildid = '%s_%s' % (self.snapshot.repo['Release'], self.buildid)
+
+    @property
+    def build_id(self):
+        """Trbs build id."""
+        return '%s_%s.%s' % (self.snapshot.repo['Release'], self.buildid, self.tstamp)
+
+    @property
+    def dir(self):
+        """Trbs directory."""
+        if self.snapshot.repo['PrereleaseDir']:
+            return os.path.join(self.snapshot.repo['PrereleaseDir'].replace(
+                                'prerelease','trbs'),
+                                self.snap_buildid)
+        else:
+            return False
+
+    @property
+    def path(self):
+        """Trbs path."""
+        return os.path.join(self.snapshot.base_path, self.dir)
+
+    def pkg_urls(self, repo):
+        """List of package(repository) urls for trbs."""
+        pkg_urls = {}
+        for arch in self.snapshot.archs(repo):
+            pkg_urls[arch] = os.path.join(self.base_url, self.dir,
+                                          self.build_id, 'repos',
+                                          repo,
+                                          'packages')
+        return pkg_urls
 
 class Snapshot(object):
     """Snapshot maintenance class."""
@@ -173,6 +212,10 @@ class Snapshot(object):
         """Factory for Prerelease object."""
         return Prerelease(self, base_url, tstamp, buildid)
 
+    def get_trbs(self, base_url, tstamp, buildid):
+        """Factory for trbs object."""
+        return Trbs(self, base_url, tstamp, buildid)
+
 def snapshot_project_enabled(backenddb, obs_project):
     """Check if project is enabled for OBS project.
        This is done by querying repo name from Redis
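
With the factory in place, a trbs build's identifiers compose the same way as Prerelease's. A sketch of the derived values, assuming repo['Release'] is 'tizen-unified' (buildid and tstamp are illustrative):

    trbs = snapshot.get_trbs('http://download.example.com',   # illustrative base_url
                             tstamp='20170601.120000', buildid='20170601.1')
    # trbs.snap_buildid -> 'tizen-unified_20170601.1'
    # trbs.build_id     -> 'tizen-unified_20170601.1.20170601.120000'
    # trbs.dir          -> repo['PrereleaseDir'] with 'prerelease' -> 'trbs',
    #                      joined with trbs.snap_buildid (False if unset)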
diff --git a/common/trbs.py b/common/trbs.py
new file mode 100755 (executable)
index 0000000..dc5ddf3
--- /dev/null
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (C) 2010, 2011, 2012, 2013, 2014 Intel, Inc.
+#
+#    This program is free software; you can redistribute it and/or
+#    modify it under the terms of the GNU General Public License
+#    as published by the Free Software Foundation; version 2 of the License.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+#
+
+"""
+APIs used in trbs scripts.
+"""
+
+import re
+
+def is_trbs_project(project_name):
+    """
+    Return true if the project name belong to trbs namespace
+    """
+    return project_name.startswith("home:trbs:")
+
+def get_trbs_project_name(obs_target_prj, git_tag_name):
+    """
+    Get trbs OBS project name from gerrit event parameters.
+
+    Trbs project name format is:
+       home:trbs:<origin_target_obs_project>:<tag>
+
+    """
+    return "home:trbs:%s:%s" % (obs_target_prj,
+                                      git_tag_name.replace('/', ':'))
+
+def get_info_from_trbs_name(trbs_project):
+    """
+    Get target obs project and tag time stamp
+
+    """
+    for tag in (':submit:', ':accepted:', ':submitgroup:'):
+        splitted = trbs_project.split(tag)
+        if len(splitted) > 1:
+            return (':'.join(splitted[0].split(':')[2:]).split(':ref:')[0],
+                    ':'.join(splitted[0].split(':')[2:]).split(':ref:')[1],
+                    splitted[-1][splitted[-1].find(':',1)+1:])
+
+def get_ref_prj_from_trbs_name(project, tag):
+    """
+    Get reference obs project
+    """
+    return (':'.join(project.split(':'+tag.replace('/', ':'))[0].split(':')[2:]))
+
+def trbs_enabled(obs_project):
+    """
+    Check if trbs is enabled for OBS project.
+    This is done by checking the comma-separated TRBS_PROJECTS environment variable.
+    """
+    import os
+
+    try:
+        if obs_project in os.getenv('TRBS_PROJECTS').split(','):
+            return True
+        else:
+            return False
+    except AttributeError:  # TRBS_PROJECTS is not set
+        return False
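
A round trip through the naming helpers, assuming the ':ref:'-embedding layout that get_info_from_trbs_name() relies on (target project, ref id and tag are illustrative):

    from common.trbs import is_trbs_project, get_trbs_project_name, \
                            get_info_from_trbs_name

    name = get_trbs_project_name('Tizen:Unified:ref:20170601.1',
                                 'submit/tizen/20170601.000000')
    # -> 'home:trbs:Tizen:Unified:ref:20170601.1:submit:tizen:20170601.000000'
    assert is_trbs_project(name)
    print get_info_from_trbs_name(name)
    # -> ('Tizen:Unified', '20170601.1', '20170601.000000')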
index 5f7521b..9f2ad6e 100644 (file)
@@ -57,3 +57,31 @@ def upload_obs_service(git_url, git_project, git_tag,
     except ObsError, error:
         raise UploadError("Unable to upload _service to %s: %s" % \
                 (obs_project, error))
+
+def upload_obs_files(git_project, git_tag, git_revision, \
+                     obs_project, build, package, files):
+    """UPload _service file to OBS.Create package if doesn't exist.To replace
+    make package in local"""
+    if not build.exists(obs_project, package):
+        try:
+            build.create_package(obs_project, package)
+        except ObsError, error:
+            raise UploadError("Unable to create package %s/%s :%s" % \
+                    (obs_project, package, error))
+    print 'Commit files'
+    commit_message = 'uploaded by prerelease job to build %s/%s(%s)' % \
+            (git_project, git_tag, git_revision)
+    try:
+        if files is not None:
+            format_files = []
+            for _f in files:
+                print '+++ %s' % _f
+                format_files.append((_f, True))
+            print "Uploading files: %s" % format_files
+            print 'obs_project=%s, package=%s' % (obs_project, package)
+            build.commit_files(obs_project, package,
+                               format_files, commit_message)
+    except ObsError, error:
+        raise UploadError("Unable to upload files to %s: %s" % \
+                (obs_project, error))
+
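
A usage sketch for upload_obs_files(); build is a connected BuildService instance and every other value here is illustrative:

    upload_obs_files('apps/org.example.app',          # git project
                     'submit/tizen/20170601.000000',  # git tag
                     'a1b2c3d',                       # git revision
                     'home:trbs:Tizen:Unified',       # OBS project
                     build, 'org.example.app',
                     ['org.example.app.spec', 'org.example.app-1.0.tar.gz'])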
index b64a1e6..d37b616 100644 (file)
@@ -25,6 +25,9 @@ import subprocess
 import re
 import shutil
 import xml.etree.cElementTree as ET
+import requests
+from bs4 import BeautifulSoup
+from urllib2 import urlopen, ProxyHandler, build_opener, install_opener, URLError, HTTPError
 
 from common import runner
 
@@ -169,6 +172,20 @@ def sync(source, destination, remove=True, hardlinks=False):
 
     return ret_code
 
+def sync_get(source, destination, remove=True):
+
+    ret_code = -1
+
+    if source.startswith('rsync:'):
+        cmd = "rsync -caz %s %s 2> /dev/null" % (source, destination)
+        try:
+            ret_code = subprocess.call(cmd, shell=True)
+        except OSError as err:
+            raise RuntimeException("Execution of %s failed: %s" %
+                                   (cmd, str(err)))
+
+    return ret_code
+
 def set_permissions(tpath, modes=(0644, 0755)):
     """
     Recursively set permission bits for files and directories.
@@ -324,3 +341,42 @@ def execute_shell(cmd, progress=False):
             return None
     return o
 
+def tail(f, n=30, c=2048, offset=0):
+    cmd = "tail %s -c %d" % (f, c)
+    return runner.outs(cmd)
+
+def list_files_in_url(url, ext=''):
+    page = requests.get(url).text
+    soup = BeautifulSoup(page, 'html.parser')
+    return [url + '/' + node.get('href') for node in soup.find_all('a') if node.get('href') and node.get('href').endswith(ext)]
+
+def wget_noproxy(filefromurl, filesaveto):
+    try:
+        proxy_handler = ProxyHandler({})
+        opener = build_opener(proxy_handler)
+        install_opener(opener)
+        f = urlopen(filefromurl)
+        with open(filesaveto, "wb") as local_file:
+            local_file.write(f.read())
+    except HTTPError, e:
+        print "HTTP Error: %s %s", e.code, filefromurl
+        pass
+    except URLError, e:
+        print "URL Error: %s %s", e.reason, filefromurl
+        pass
+
+def grap_text_from_url(url):
+    html = urlopen(url).read()
+    soup = BeautifulSoup(html, 'html.parser')
+
+    # rip all script and style elements out
+    for script in soup(["script", "style"]):
+        script.extract()
+
+    text = soup.get_text()
+    lines = (line.strip() for line in text.splitlines())
+    chunks = (phrase.strip() for line in lines for phrase in line.split("  "))
+    text = '\n'.join(chunk for chunk in chunks if chunk)
+
+    return text
\ No newline at end of file
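
The two fetch helpers compose: list_files_in_url() scrapes hrefs out of a directory listing, and wget_noproxy() downloads one of them while bypassing any configured proxy. A sketch with an illustrative URL:

    from common.utils import list_files_in_url, wget_noproxy

    urls = list_files_in_url('http://download.example.com/sdk', ext='.bin')
    if urls:
        wget_noproxy(urls[-1], '/tmp/installer.bin')  # last entry in the listing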
index 1c8d9da..aabfdc5 100644 (file)
@@ -18,7 +18,10 @@ Depends:  ${python:Depends},
  jenkins-scripts-common,
  jenkins-scripts-submitobs,
  jenkins-scripts-tzs,
- jenkins-scripts-init,
+ jenkins-scripts-dependsgraph,
+ jenkins-scripts-trbs,
+ jenkins-scripts-abs,
+ jenkins-scripts-groovyinit,
  python-mysqldb
 Description: Jenkins build job will call this scripts
   when build job is triggered by Gerrit/OBS event.
@@ -57,7 +60,25 @@ Depends:  ${python:Depends},
  jenkins-scripts-common
 Description: Isolated job_submitobs to avoid package installation conflicts
 
-Package: jenkins-scripts-init
+Package: jenkins-scripts-groovyinit
 Architecture: all
 Description: Groovy init scripts for jenkins startup
 
+Package: jenkins-scripts-dependsgraph
+Architecture: all
+Description: Dependency Graph Generation tool
+Depends:  ${python:Depends},
+ jenkins-scripts-common
+
+Package: jenkins-scripts-trbs
+Architecture: all
+Description: Tizen Reference Build System scripts
+Depends:  ${python:Depends},
+ jenkins-scripts-common
+
+Package: jenkins-scripts-abs
+Architecture: all
+Description: App Build System scripts
+Depends:  ${python:Depends},
+ jenkins-scripts-common
+
diff --git a/debian/jenkins-scripts-abs.install b/debian/jenkins-scripts-abs.install
new file mode 100644 (file)
index 0000000..423feb1
--- /dev/null
@@ -0,0 +1 @@
+debian/tmp/abs/* /var/lib/jenkins/jenkins-scripts/abs/
index 52527b5..b8455a4 100644 (file)
@@ -1,4 +1,27 @@
-debian/tmp/common/* /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/__init__.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/backenddb.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/builddata.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/buildservice.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/buildtrigger.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/gerrit.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/git.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/imagedata.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/mapping.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/obspkg.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/prerelease.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/repomaker.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/runner.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/send_mail.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/snapshot.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/utils.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/manifest.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/upload_service.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/iris_rest_client.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/apply_scm_meta_git.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/check_scm_meta_git.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/git_diff_parse.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/buildmonitor_db.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/git_obs_mapping.py /var/lib/jenkins/jenkins-scripts/common/
 debian/tmp/job_re.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_create_snapshot.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_release_snapshot.py /var/lib/jenkins/jenkins-scripts/
@@ -9,36 +32,10 @@ debian/tmp/job_mail_sender.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_load_repos.yaml.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_sync_obs.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_sync_snapdiff.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_update_local_git.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_monitor_scm_meta_git.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_check_scm_meta_git.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/templates/index.html /var/lib/jenkins/jenkins-scripts/templates/
-debian/tmp/scripts/check_section.sh /var/lib/jenkins/jenkins-scripts/scripts/
-debian/tmp/scripts/get_git_desc_info.sh /var/lib/jenkins/jenkins-scripts/scripts/
 debian/tmp/job_buildmonitor.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_ref_snapshot_info_update.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_ref_create_prj_obs.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_ref_import_rpm_obs.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_ref_purge_prj_obs.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_ref_precheck_project_obs.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_test_trigger_info_update.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_update_nuget.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/scripts/nuget.exe /var/lib/jenkins/jenkins-scripts/scripts/
-debian/tmp/job_add_git_tag.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_repa.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_trbs_test_result_receiver.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_update_scm_meta_git_for_dashboard.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_update_git_obs_mapping_for_dashboard.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_update_git_branch_project_mapping_for_dashboard.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_litmus_jira_issue_receiver.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_litmus_tct_file_receiver.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_add_dotnet_launching_performance_test.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/job_make_dep_graph.py /var/lib/jenkins/jenkins-scripts/
-debian/tmp/common/dep_graph.html.template /var/lib/jenkins/jenkins-scripts/common/
-debian/tmp/common/dep_graph.html.template_simple /var/lib/jenkins/jenkins-scripts/common/
-debian/tmp/common/dep_parse.py /var/lib/jenkins/jenkins-scripts/common/
-debian/tmp/dep_graph/* /var/lib/jenkins/jenkins-scripts/dep_graph/
-debian/tmp/vis/* /var/lib/jenkins/jenkins-scripts/vis/
-debian/tmp/vis/dist/* /var/lib/jenkins/jenkins-scripts/vis/dist/
-debian/tmp/vis/dist/img/network/* /var/lib/jenkins/jenkins-scripts/vis/dist/img/network/
-debian/tmp/vis/dist/img/timeline/* /var/lib/jenkins/jenkins-scripts/vis/dist/img/timeline/
+debian/tmp/templates/index.html /var/lib/jenkins/jenkins-scripts/templates
+debian/tmp/job_update_local_git.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/scripts/check_section.sh /var/lib/jenkins/jenkins-scripts/scripts
+debian/tmp/scripts/get_git_desc_info.sh /var/lib/jenkins/jenkins-scripts/scripts
+debian/tmp/scripts/nuget.exe /var/lib/jenkins/jenkins-scripts/scripts
diff --git a/debian/jenkins-scripts-dependsgraph.install b/debian/jenkins-scripts-dependsgraph.install
new file mode 100644 (file)
index 0000000..8c3ef9e
--- /dev/null
@@ -0,0 +1,9 @@
+debian/tmp/job_make_dep_graph.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/common/dep_graph.html.template /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/dep_graph.html.template_simple /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/common/dep_parse.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/dep_graph/* /var/lib/jenkins/jenkins-scripts/dep_graph/
+debian/tmp/vis/* /var/lib/jenkins/jenkins-scripts/vis/
+debian/tmp/vis/dist/* /var/lib/jenkins/jenkins-scripts/vis/dist/
+debian/tmp/vis/dist/img/network/* /var/lib/jenkins/jenkins-scripts/vis/dist/img/network/
+debian/tmp/vis/dist/img/timeline/* /var/lib/jenkins/jenkins-scripts/vis/dist/img/timeline/
diff --git a/debian/jenkins-scripts-trbs.install b/debian/jenkins-scripts-trbs.install
new file mode 100644 (file)
index 0000000..cbf43be
--- /dev/null
@@ -0,0 +1,3 @@
+debian/tmp/common/trbs.py /var/lib/jenkins/jenkins-scripts/common/
+debian/tmp/job_trbs_test_result_receiver.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/trbs/* /var/lib/jenkins/jenkins-scripts/trbs/
index b0f6c30..9a255c1 100644 (file)
@@ -6,5 +6,31 @@ debian/tmp/job_request.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_test_build.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/job_rpm_buildlogs.py /var/lib/jenkins/jenkins-scripts/
 debian/tmp/obs_requests /var/lib/jenkins/jenkins-scripts/
+debian/tmp/common/tempbuildpkg.py /var/lib/jenkins/jenkins-scripts/common/
 debian/tmp/dir-purge-tool.sh /var/lib/jenkins/jenkins-scripts/
 debian/tmp/logs-collector.sh /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_monitor_scm_meta_git.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_check_scm_meta_git.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_ref_snapshot_info_update.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_ref_create_prj_obs.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_ref_import_rpm_obs.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_ref_purge_prj_obs.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_ref_precheck_project_obs.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_test_trigger_info_update.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_update_scm_meta_git_for_dashboard.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_update_git_obs_mapping_for_dashboard.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_update_git_branch_project_mapping_for_dashboard.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_litmus_jira_issue_receiver.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_litmus_tct_file_receiver.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_add_dotnet_launching_performance_test.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_update_nuget.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_add_git_tag.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_create_sdkrootstrap.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_find_incorrect_filenames.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_importrpm_obs.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_rsync_download.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_sync_repo.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_trigger_for_sync_repo.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_trigger_obs_sync.py /var/lib/jenkins/jenkins-scripts/
+debian/tmp/job_update_public_git.py /var/lib/jenkins/jenkins-scripts/
+
index 8e22636..4879bc0 100755 (executable)
@@ -23,7 +23,7 @@ install: build
        # Installing package
        mkdir -p $(CURDIR)/debian/tmp/
        install -d $(CURDIR)/debian/tmp/
-       cp -r job_*.py dir-purge-tool.sh logs-collector.sh common obs_requests templates scripts dep_graph vis groovy_init_scripts $(CURDIR)/debian/tmp/
+       cp -r job_*.py dir-purge-tool.sh logs-collector.sh common obs_requests templates scripts dep_graph vis groovy_init_scripts trbs abs $(CURDIR)/debian/tmp/
 binary-indep: build install
        dh_testdir
        dh_testroot
index 1d64b2f..3b6452b 100755 (executable)
@@ -24,6 +24,7 @@ This code is called when create snapshot.
 import os
 import sys
 import shutil
+import re
 
 from xml.dom import minidom
 
@@ -113,10 +114,17 @@ def main():
 
     project = content.get("project")
 
+    #TODO: backend slave selection.
+    backend_label = os.getenv('BACKEND_SELECTION', 'BACKEND_01')
+    if os.getenv('%s_REGEX' % backend_label) and \
+        re.search(r'%s' % os.getenv('%s_REGEX' % backend_label), project) is None:
+        print 'Backend(%s) selection mismatch for %s.' % (backend_label, project)
+        return
+
     obs_api = os.getenv("OBS_API_URL")
     obs_user = os.getenv("OBS_API_USERNAME")
     obs_passwd = os.getenv("OBS_API_PASSWD")
 
     build = BuildService(obs_api, obs_user, obs_passwd)
 
     for repository in content.get("repo").keys():
@@ -174,5 +182,14 @@ def main():
     data['repo_path'] = content['repo_path']
     trigger_next("make_dep_graph", data)
 
+    # TRIGGER NEXT SYNC-AWS
+    if os.getenv("TRBS_SYNC_AWS_ENABLED", "0") != "0":
+        data = {"repo_path": content['repo_path']}
+        trigger_next('SYNC-AWS', data)
+
+    # TRIGGER NEXT RSYNC-DOWNLOAD (MIRROR)
+    if os.getenv("RSYNC_DOWNLOAD_ENABLED", "0") != "0":
+        trigger_next('RSYNC_DOWNLOAD', content)
+
 if __name__ == '__main__':
     sys.exit(main())
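
A note on the backend gate added above: each Jenkins node is expected to export a BACKEND_SELECTION label plus a matching <label>_REGEX, so one post-snapshot job definition can be shared by several backends. A minimal sketch of that contract, with a hypothetical regex value:

import os
import re

os.environ['BACKEND_SELECTION'] = 'BACKEND_02'      # set per Jenkins node
os.environ['BACKEND_02_REGEX'] = '^Tizen:Unified'   # hypothetical pattern

def backend_matches(project):
    # Mirrors the guard above: proceed only when the selected backend's
    # regex matches the OBS project name (or when no regex is configured).
    label = os.getenv('BACKEND_SELECTION', 'BACKEND_01')
    regex = os.getenv('%s_REGEX' % label)
    return not regex or re.search(regex, project) is not None

print backend_matches('Tizen:Unified')     # True: this backend handles it
print backend_matches('Tizen:3.0:Mobile')  # False: the job returns early
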
index 997c76a..b4f3197 100644 (file)
@@ -43,11 +43,13 @@ TABLE  : sr_status
 import os
 import sys
 import datetime
+import json
+import base64
 import re
 
 from time import sleep
 from common import buildmonitor_db
-from common.buildtrigger import trigger_info, trigger_next
+from common.buildtrigger import trigger_info, trigger_next, remote_jenkins_build_job
 
 class LocalError(Exception):
     """Local error exception."""
@@ -83,6 +85,34 @@ def bm_disconnect_db():
     buildmonitor_db.disconnect_db()
 
 #===============================================================================
+# for remote trigger from TRBS to PRE/POST
+def bm_remote_jenkins_build_job(remote_data):
+    print '[%s] remote data(%s)\n' % (__file__, remote_data)
+    remote_jenkins_build_job(os.getenv('PUBLIC_JENKINS_URL'), \
+                             os.getenv('PUBLIC_JENKINS_USER'), \
+                             os.getenv('PUBLIC_JENKINS_PW'), \
+                             os.getenv('PUBLIC_JOB_NAME'), \
+                             os.getenv('PUBLIC_JOB_TOKEN'), \
+                             'TRIGGER_INFO=%s\n' \
+                             %(base64.b64encode(json.dumps(remote_data))))
+
+def bm_remote_jenkins_build_job_by_file(job, remote_data):
+    print '[%s] remote data(%s)\n' % (__file__, remote_data)
+
+    with open('TRIGGER_INFO_FILE', 'w') as f:
+        f.write(base64.b64encode(json.dumps(remote_data)))
+
+    files = [("TRIGGER_INFO_FILE", "TRIGGER_INFO_FILE")]
+
+    remote_jenkins_build_job(os.getenv('PUBLIC_JENKINS_URL'), \
+                             os.getenv('PUBLIC_JENKINS_USER'), \
+                             os.getenv('PUBLIC_JENKINS_PW'), \
+                             job,
+                             os.getenv('PUBLIC_JOB_TOKEN'), \
+                             None,
+                             files)
+
+#===============================================================================
 # [job_submit.py]
 
 def get_src_proj_lst(bm_src_prj_lst):
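
The two helpers above push the monitor payload to the public Jenkins as a TRIGGER_INFO parameter holding base64-encoded JSON, inline for small payloads and as an uploaded file for large ones, apparently to dodge parameter-length limits. A sketch of the decoding the receiving job has to perform; the real implementation is common.buildtrigger.trigger_info, so this is illustration only:

import json
import base64

def decode_trigger_info(raw):
    # Jenkins can turn '+' into spaces when the parameter travels in a URL,
    # so restore them before decoding (main() below does the same).
    return json.loads(base64.b64decode(raw.replace(' ', '+')))

payload = {'bm_stage': '[TRBS]_Submit', 'project': 'home:prerelease:example'}
assert decode_trigger_info(base64.b64encode(json.dumps(payload))) == payload
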
@@ -1853,6 +1883,9 @@ def main():
     """
     bm_connect_db()
 
+    # TRBS Sender mode
+    trbs_buildmonitor_enabled = int(os.getenv("TRBS_BUILDMONITOR_ENABLED", "0"))
+
     # prevent base64 decoding error when triggered by the remote host
     trigger_info_data = os.getenv('TRIGGER_INFO').replace(' ', '+')
 
@@ -1925,6 +1958,13 @@ def main():
 
         update_sr_submit_log_completed(git_tag)
 
+        # TRBS Sender mode
+        if trbs_buildmonitor_enabled:
+            # remote trigger BUILD-MONITOR job of public
+            remote_data = content
+            remote_data['bm_stage'] = '[TRBS]_Submit'
+            bm_remote_jenkins_build_job(remote_data)
+
     #=======================================================
     # [PRE] Build
     #=======================================================
@@ -1963,6 +2003,13 @@ def main():
 
         update_unresolvable_broken_packages(project, unresolvable_broken_packages)
 
+        # TRBS Sender mode
+        if trbs_buildmonitor_enabled:
+            # remote trigger BUILD-MONITOR job of public
+            remote_data = content
+            remote_data['bm_stage'] = '[TRBS]_Pre_Snap_Start'
+            bm_remote_jenkins_build_job(remote_data)
+
     elif bm_stage == 'Pre_Snap_End' or bm_stage == '[TRBS]_Pre_Snap_End':
         print '[%s][Pre_Snap_End]\n' % (__file__)
 
@@ -2000,6 +2047,19 @@ def main():
                                                             bm_snapshot_num,
                                                             bm_end_datetime)
 
+        # TRBS Sender mode
+        if trbs_buildmonitor_enabled:
+            # remote trigger BUILD-MONITOR job of public
+            remote_data = content
+            remote_data['bm_stage'] = '[TRBS]_Pre_Snap_End'
+            bm_spl_data = os.getenv('BUILDMONITOR_SPIN_URL')
+            if bm_spl_data in bm_snapshot_url:
+                remote_data['bm_snapshot_url'] = bm_snapshot_url.split(bm_spl_data)[1]
+            else:
+                print '[%s] check the bm_spl_data(%s)!!\n' \
+                      % (__file__, bm_spl_data)
+            bm_remote_jenkins_build_job(remote_data)
+
     elif bm_stage == 'Pre_Snap_packages' or bm_stage == '[TRBS]_Pre_Snap_packages':
         print '[%s][Pre_Snap_packages]\n' % (__file__)
 
@@ -2038,6 +2098,19 @@ def main():
                                                                      bm_pkg_count,
                                                                      BM_PKG_LIMIT)
 
+        # TRBS Sender mode
+        if trbs_buildmonitor_enabled:
+            # remote trigger BUILD-MONITOR job of public
+            remote_data = content
+            remote_data['bm_stage'] = '[TRBS]_Pre_Snap_packages'
+            bm_spl_data = os.getenv('BUILDMONITOR_SPIN_URL')
+            if bm_spl_data in bm_pkg_url:
+                remote_data['bm_pkg_url'] = bm_pkg_url.split(bm_spl_data)[1]
+            else:
+                print '[%s] check the bm_spl_data(%s)!!\n' \
+                      % (__file__, bm_spl_data)
+            bm_remote_jenkins_build_job(remote_data)
+
     elif bm_stage == 'Pre_Snap_Fail' or bm_stage == '[TRBS]_Pre_Snap_Fail':
         print '[%s][Pre_Snap_Fail]\n' % (__file__)
 
@@ -2054,6 +2127,14 @@ def main():
         if bm_stage == '[TRBS]_Pre_Snap_Fail':
             print '[%s][[TRBS]_Pre_Snap_Fail]\n' % (__file__)
             TRBS_update_fail_status_for_sr_stage(project, bm_git_tag)
+
+        # TRBS Sender mode
+        if trbs_buildmonitor_enabled:
+            # remote trigger BUILD-MONITOR job of public
+            remote_data = content
+            remote_data['bm_stage'] = '[TRBS]_Pre_Snap_Fail'
+            bm_remote_jenkins_build_job(remote_data)
+
     #=======================================================
     # [PRE]/[POST] Image
     elif bm_stage == 'Image' or bm_stage == '[TRBS]_Image':
@@ -2081,6 +2162,18 @@ def main():
             if bm_stage == '[TRBS]_Image':
                 print '[%s][[TRBS]_Image]\n' % (__file__)
                 TRBS_update_fail_create_image_for_sr_stage(fields, bm_start_datetime)
+
+        # TRBS Sender mode
+        if trbs_buildmonitor_enabled:
+            # remote trigger BUILD-MONITOR job of public
+            remote_data = content
+            remote_data['bm_stage'] = '[TRBS]_Image'
+            (remote_data['fields'])['url'] = os.path.join(fields['repo_path'],
+                                                      fields['images_path'])
+            # prevent long param error
+            (remote_data['fields'])['kickstart'] = 0
+            bm_remote_jenkins_build_job(remote_data)
+
     #=======================================================
     # [PRE]/[POST] Post_Image
     elif bm_stage == 'Post_Image' or bm_stage == '[TRBS]_Post_Image':
@@ -2100,6 +2193,14 @@ def main():
         if bm_stage == '[TRBS]_Post_Image':
             print '[%s][[TRBS]_Post_Image]\n' % (__file__)
             TRBS_end_create_image_for_sr_stage(bm_start_datetime, project)
+
+        # TRBS Sender mode
+        if trbs_buildmonitor_enabled:
+            # remote trigger BUILD-MONITOR job of public
+            remote_data = content
+            remote_data['bm_stage'] = '[TRBS]_Post_Image'
+            bm_remote_jenkins_build_job(remote_data)
+
     #=======================================================
     # SR Accept or Reject
     elif bm_stage == 'SR_Accept':
@@ -2194,6 +2295,14 @@ def main():
         print "[%s][UPDATE_BUILD_LOG]\n" % (__file__)
         update_build_log(content)
 
+        # TRBS Sender mode
+        if trbs_buildmonitor_enabled:
+            # remote trigger BUILD-MONITOR job of public
+            remote_data = content
+            remote_data['base_url'] = content.get('base_url')[len(os.getenv("URL_PUBLIC_REPO_BASE")):]
+            remote_data['bm_stage'] = '[TRBS]_UPDATE_BUILD_LOG'
+            bm_remote_jenkins_build_job_by_file("BUILD-MONITOR-TRBS-UPDATE-BUILD-LOG", remote_data)
+
     elif bm_stage == "[TRBS]_UPDATE_BUILD_LOG":
         print "[%s][[TRBS]_UPDATE_BUILD_LOG]\n" % (__file__)
         update_build_log(content)
diff --git a/job_create_sdkrootstrap.py b/job_create_sdkrootstrap.py
new file mode 100755 (executable)
index 0000000..9fd5d6a
--- /dev/null
@@ -0,0 +1,501 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+"""This script is used to create SDK rootstrap"""
+
+import re
+import os
+import sys
+import shutil
+import subprocess
+import urllib2
+import codecs
+
+from common.buildtrigger import trigger_info, trigger_next
+from common.utils import sync, set_permissions, Workdir
+from common.git import Git, clone_gitproject
+
+from common.send_mail import prepare_mail
+
+# remote jenkins build job
+import requests
+
+EMAIL_TITLE = "[SDK RBS] has been triggered (%s Build Status : %s)"
+
+EMAIL_COUNT_MESSAGE = "An SDK RootstrapBuildSystem build has been triggered \n\n" \
+                       "- OBS project : %s \n" \
+                       "- Snapshot url : %s \n" \
+                       "- Basesnapshot url : %s \n" \
+                       "- SdkRootstrap url : %s \n" \
+                       "- Status : %s \n"
+
+EMAIL_FOOTER = '\n\n--------------------------------------------------------\n'\
+               'Automatically generated by SDK RootstrapBuildSystem service.\n'\
+               'Please DO NOT Reply!'
+
+EMAIL_CONTENT=[]
+
+profiles = [
+            {'name': '3.0-mobile',
+             'project': 'Tizen:3.0:Mobile',
+             'base_url': os.getenv('RBS_BASE_URL'),
+             'base_repo_path': 'public_mirror/tizen/3.0-base',
+             'repo_path': 'public_mirror/tizen/3.0-mobile',
+             'git_rs_prj': 'sdk/tools/sdk-image',
+             'public_cmd': 'rs_snapshot_public',
+             'private_cmd': 'rs_snapshot',
+             'git_branch': 'platform_3.0'},
+            {'name': '3.0-wearable',
+             'project': 'Tizen:3.0:Wearable',
+             'base_url': os.getenv('RBS_BASE_URL'),
+             'base_repo_path': 'public_mirror/tizen/3.0-base',
+             'repo_path': 'public_mirror/tizen/3.0-wearable',
+             'git_rs_prj': 'sdk/tools/sdk-image',
+             'public_cmd': 'rs_snapshot_public',
+             'private_cmd': 'rs_snapshot',
+             'git_branch': 'platform_3.0'},
+            {'name': 'tizen-unified',
+             'project': 'Tizen:Unified',
+             'base_url': os.getenv('RBS_BASE_URL'),
+             'base_repo_path': 'public_mirror/tizen/base',
+             'repo_path': 'public_mirror/tizen/unified',
+             'git_rs_prj': 'sdk/tools/sdk-image',
+             'public_cmd': 'rs_rbs_public',
+             'private_cmd': 'rs_rbs_private',
+             'git_branch': 'platform_4.0'},
+           ]
+
+profile = None
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+def setup_profile(project):
+    # set up profile
+    for l in profiles:
+        if project == l['project']:
+            profile = l
+            print 'project = %s' % (project)
+            return profile
+
+    return False
+
+def send_mail(title, msg, receiver):
+    """ post message back to gerrit and send mail to tag owner """
+    if 'author' in receiver and 'email' in receiver:
+        msg = 'Hi, %s\n\n' % receiver['author'] + msg + EMAIL_FOOTER
+        prepare_mail("%s.env" % os.getenv('BUILD_TAG'), title, msg,
+                     os.getenv('NOREPLY_EMAIL_SENDER'), receiver['email'])
+
+def copy_sshkey_to_vm(src, dst):
+    """
+    """
+    print 'src %s dst %s' % (src, dst)
+
+    if os.path.exists(src):
+        shutil.copytree(src, dst)
+
+def run_inside_vm(vm_image, vm_memory, vm_cpus, basedir):
+    """
+    Run build/run inside VM
+    """
+    cmd = 'qemu-system-x86_64 -machine accel=kvm:xen:tcg -name '\
+          'ubuntu -M pc -m %d -smp %d -vga none -drive file=%s,'\
+          'snapshot=on -nographic -virtfs local,id=test_dev,'\
+          'path=%s,security_model=mapped,mount_tag=share ' % \
+          (vm_memory, vm_cpus, vm_image, basedir)
+    print "run cmd = %s" % (cmd)
+    #subprocess.call(cmd, stdout=sys.stdout,
+    #                stderr=sys.stderr, shell=True)
+    outfile = os.path.join(basedir, 'build', 'rs.log')
+    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+    out, err = proc.communicate()
+    print 'VM output:\n%s' % out
+    if out:
+        with open(outfile, 'w') as f:
+            f.write('%s' % out)
+
+    #read testresult from file
+    try:
+        testresult = os.path.join(basedir,'build','testresult')
+        if not os.path.exists(testresult):
+            print "%s not exists file" % (testresult)
+
+        with open(os.path.join(basedir,'build','testresult')) as statusf:
+            status = statusf.read().strip()
+        print 'status = %s' % (status)
+        if int(status) == 0:
+            return 0
+        else:
+            return status
+    except IOError, _err:
+        print "error"
+        return -1
+
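
run_inside_vm() exports basedir into the guest over virtio-9p (mount_tag=share) and relies on the VM image to execute /share/build/run and write build/testresult back. Building the same qemu invocation as an argv list avoids the quoting pitfalls of shell=True; a sketch under those assumptions:

import subprocess

def build_qemu_argv(vm_image, vm_memory, vm_cpus, basedir):
    # Same flags as the string version above, just pre-split.
    return ['qemu-system-x86_64',
            '-machine', 'accel=kvm:xen:tcg',
            '-name', 'ubuntu', '-M', 'pc',
            '-m', str(vm_memory), '-smp', str(vm_cpus),
            '-vga', 'none',
            '-drive', 'file=%s,snapshot=on' % vm_image,
            '-nographic',
            '-virtfs', 'local,id=test_dev,path=%s,security_model=mapped,'
                       'mount_tag=share' % basedir]

# proc = subprocess.Popen(build_qemu_argv(image, 8492, 4, basedir),
#                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
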
+def sync_to_download(src, dst, log_src):
+    """
+    Rsync the build output and log to the download server.
+    """
+    rsync_cmd = 'rsync -avr %s/ %s' % (src, dst)
+    print 'rsync_cmd = %s' % (rsync_cmd)
+    proc = subprocess.Popen(rsync_cmd, shell=True, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+    out, err = proc.communicate()
+
+    if err:
+        print "Failed rsync download: %s" % err
+
+    rsync_cmd = 'rsync -avr %s %s' % (log_src, dst)
+    print 'rsync_cmd = %s' % (rsync_cmd)
+    proc = subprocess.Popen(rsync_cmd, shell=True, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+    out, err = proc.communicate()
+    if err:
+        print "Failed rsync download: %s" % err
+
+def remote_jenkins_build_job(sdkrootstrap_url):
+    """ remote jenkins build job"""
+    print 'remote jenkins job sdkrootstrap_url: %s ' % (sdkrootstrap_url)
+    if os.getenv('REMOTE_JENKINS_URL') and os.getenv('REMOTE_JENKINS_USER') \
+        and os.getenv('REMOTE_JENKINS_PW') and os.getenv('REMOTE_JOB_NAME'):
+        url = '%s/job/%s/buildWithParameters?token=%s' \
+              %(os.getenv('REMOTE_JENKINS_URL'), os.getenv('REMOTE_JOB_NAME'), \
+                os.getenv('REMOTE_JOB_TOKEN'))
+        username = os.getenv('REMOTE_JENKINS_USER')
+        password = os.getenv('REMOTE_JENKINS_PW')
+        data = {'NEW_ROOTSTRAP': sdkrootstrap_url}
+        try:
+            resp = requests.post(url, params=data,
+                                auth=(username, password)
+                                )
+            status_code = resp.status_code
+            print status_code
+        except requests.exceptions.Timeout as e:
+            print(e)
+        except requests.exceptions.ConnectionError as e:
+            print(e)
+        except Exception as e:
+            print(e)
+            raise LocalError('Remote Jenkins trigger failed: %s' % str(e))
+
+def make_rs_snapshot_private(git_prj, git_cache, mygit, fields, profile, vm_image):
+    """
+    Make Rootstrap snapshot(private api)
+    """
+    # checkout branch
+    git_branch = profile['git_branch']
+    mygit.checkout(git_branch)
+
+    rs = os.path.join(git_cache, git_prj, 'make_'+profile['private_cmd']+'.sh')
+    if not os.path.isfile(rs):
+        print '%s is not found' % rs
+        return 'failed'
+
+    build_id = fields["build_id"]
+    base_repo_path = profile['base_repo_path']
+
+    # Prepare working directory
+    rootstrap = 'private-sdk-rootstrap'
+    basedir = os.path.join(os.getenv('WORKSPACE'), rootstrap)
+    if os.path.exists(basedir):
+        shutil.rmtree(basedir)
+    builddir = os.path.join(basedir, 'build')
+    os.makedirs(builddir)
+
+    # copy sshkey to builddir
+    ssh_src_dir = os.path.join(os.getenv("JENKINS_HOME"),'.ssh')
+    ssh_dst_dir = os.path.join(builddir,'.ssh')
+    copy_sshkey_to_vm(ssh_src_dir,ssh_dst_dir)
+
+    # build command line
+    #'apt-get install -y xmlstarlet rpm2cpio ruby\n' \
+    git_host = os.getenv("GERRIT_HOSTNAME")
+    git_port = os.getenv("GERRIT_SSHPORT")
+
+    git_cmd = 'git clone ssh://%s:%s/%s -b %s' % ( git_host, git_port, git_prj, git_branch)
+    print 'git_cmd %s' %(git_cmd)
+
+    buildcmd = '#!/bin/bash \n' \
+               'BUILDDIR="/share/build"\nWORKDIR="/srv/work"\nRS="rbs_out"\nSDKIMAGE="sdk-image"\n' \
+               'chown -R build:build $BUILDDIR\nchmod 600 $BUILDDIR/.ssh/id_rsa \n' \
+               'cp -r "$BUILDDIR/.ssh/" /root/\nchmod 600 /root/.ssh/id_rsa\n' \
+               'mkdir -p "$WORKDIR"\ncd "$WORKDIR"\n%s\n' \
+               '$WORKDIR/$SDKIMAGE/make_%s.sh -u %s -b %s\n' \
+               '$WORKDIR/$SDKIMAGE/make_%s_64.sh -u %s -b %s\n' \
+               'cp -r $WORKDIR/$RS/ $BUILDDIR\n' % \
+               (git_cmd,
+                profile["private_cmd"],
+                os.path.join(profile["base_url"],profile["repo_path"],build_id),
+                os.path.join(profile["base_url"],profile["base_repo_path"],'latest'),
+                profile["private_cmd"],
+                os.path.join(profile["base_url"],profile["repo_path"],build_id),
+                os.path.join(profile["base_url"],profile["base_repo_path"],'latest'))
+
+    print buildcmd
+    with open(os.path.join(builddir, 'run'), 'w') as fcmdl:
+        fcmdl.write('%s' % buildcmd)
+
+    os.chmod(os.path.join(builddir, 'run'), 0777)
+
+    print 'starting BUILD inside VM to create sdk private rootstrap'
+    sys.stdout.flush()
+
+    ret = run_inside_vm(vm_image, int(os.getenv("RBS_VM_MEMORY", 8492)),
+                        int(os.getenv("RBS_VM_CPUS", 4)), basedir)
+    # workaround for qemu/9p bug in mapping permissions
+    set_permissions(builddir, (0644, 0755))
+
+    status = 'success'
+
+    if int(ret) != 0:
+        print 'Error: sdk rootstrap returned %s' % ret
+        status = 'failed'
+
+    # sync sdk rootstrap image to SYNC_DEST
+    if os.getenv('IMG_SYNC_DEST_BASE'):
+        sync_to_download(os.path.join(builddir, 'rbs_out'),
+                         os.path.join(os.getenv('IMG_SYNC_DEST_BASE'),
+                         profile['repo_path'],build_id,'builddata', rootstrap),
+                         os.path.join(builddir, 'rs.log'))
+
+    #sync to RSYNC_SNAPSHOT
+    # check to folder
+    if os.getenv('RSYNC_SNAPSHOT'):
+        sync_to_download(os.path.join(builddir, 'rbs_out'),
+                         os.path.join(os.getenv('RSYNC_SNAPSHOT'),
+                         profile['repo_path'],build_id,'builddata', rootstrap),
+                         os.path.join(builddir, 'rs.log'))
+
+    if status == 'success':
+        print "The build was successful."
+        #### abs update ####
+        # notify remote jenkins build job
+        remote_rs_url = os.path.join(profile["base_url"],profile["repo_path"],build_id,'builddata', rootstrap)
+        remote_jenkins_build_job(remote_rs_url)
+
+    if os.getenv('IMG_SYNC_DEST_BASE') and os.getenv('RSYNC_SNAPSHOT'):
+        print os.getenv('URL_PUBLIC_REPO_BASE')
+        origin_IP = re.match(r'.*:\/\/([0-9.]{1,})\/.*', os.getenv('URL_PUBLIC_REPO_BASE')).group(1)
+        copy_IP = re.match(r'.*:\/\/([0-9.]{1,})\/.*', os.getenv('RSYNC_SNAPSHOT')).group(1)
+
+        snapshot_url = os.path.join(profile["base_url"].replace(origin_IP,copy_IP),profile["repo_path"],build_id)
+        basesnapshot_url = os.path.join(profile["base_url"].replace(origin_IP,copy_IP),profile["base_repo_path"],'latest')
+        sdkrootstrap_url = os.path.join(snapshot_url.replace(origin_IP,copy_IP), 'builddata', rootstrap)
+    else:
+        snapshot_url = os.path.join(profile["base_url"],profile["repo_path"],build_id)
+        basesnapshot_url = os.path.join(profile["base_url"],profile["base_repo_path"],'latest')
+        sdkrootstrap_url = os.path.join(snapshot_url, 'builddata', rootstrap)
+
+    if status == 'failed':
+        # Notify email to RBS_MAILINGLIST
+        title = EMAIL_TITLE % (profile["project"], status)
+        msg = EMAIL_COUNT_MESSAGE % (profile["project"],
+                                     snapshot_url,
+                                     basesnapshot_url,
+                                     sdkrootstrap_url,
+                                     status)
+        receiver = {'author': 'RBS Administrator',
+                    'email': os.getenv('RBS_MAILINGLIST')}
+        if receiver:
+            send_mail(title, msg, receiver)
+
+    return status
+
+def make_rs_snapshot_public(git_prj, git_cache, mygit, fields, profile, vm_image):
+    """
+    Make Rootstrap snapshot(public api)
+    """
+
+    # checkout branch
+    git_branch = profile['git_branch']
+    mygit.checkout(git_branch)
+
+    rs = os.path.join(git_cache, git_prj, 'make_'+profile['public_cmd']+'.sh')
+    if not os.path.isfile(rs):
+        print '%s is not found' % rs
+        return 'failed'
+
+    build_id = fields["build_id"]
+    base_repo_path = profile['base_repo_path']
+
+
+    # Prepare working directory
+    rootstrap = 'public-sdk-rootstrap'
+    basedir = os.path.join(os.getenv('WORKSPACE'), rootstrap)
+    if os.path.exists(basedir):
+        shutil.rmtree(basedir)
+    builddir = os.path.join(basedir, 'build')
+    os.makedirs(builddir)
+
+    # copy sshkey to builddir
+    ssh_src_dir = os.path.join(os.getenv("JENKINS_HOME"),'.ssh')
+    ssh_dst_dir = os.path.join(builddir,'.ssh')
+    copy_sshkey_to_vm(ssh_src_dir,ssh_dst_dir)
+
+    # build command line
+    #'apt-get install -y xmlstarlet rpm2cpio ruby\n' \
+    git_host = os.getenv("GERRIT_HOSTNAME")
+    git_port = os.getenv("GERRIT_SSHPORT")
+
+    git_cmd = 'git clone ssh://%s:%s/%s -b %s' % ( git_host, git_port, git_prj, git_branch)
+    print 'git_cmd %s' %(git_cmd)
+
+    buildcmd = '#!/bin/bash \n' \
+               'BUILDDIR="/share/build"\nWORKDIR="/srv/work"\nRS="rbs_out"\nSDKIMAGE="sdk-image"\n' \
+               'chown -R build:build $BUILDDIR\nchmod 600 $BUILDDIR/.ssh/id_rsa \n' \
+               'cp -r "$BUILDDIR/.ssh/" /root/\nchmod 600 /root/.ssh/id_rsa\n' \
+               'mkdir -p "$WORKDIR"\ncd "$WORKDIR"\n%s\n' \
+               '$WORKDIR/$SDKIMAGE/make_%s.sh -u %s -b %s\n' \
+               '$WORKDIR/$SDKIMAGE/make_%s_64.sh -u %s -b %s\n' \
+               'cp -r $WORKDIR/$RS/ $BUILDDIR\n' % \
+               (git_cmd,
+                profile["public_cmd"],
+                os.path.join(profile["base_url"],profile["repo_path"],build_id),
+                os.path.join(profile["base_url"],profile["base_repo_path"],'latest'),
+                profile["public_cmd"],
+                os.path.join(profile["base_url"],profile["repo_path"],build_id),
+                os.path.join(profile["base_url"],profile["base_repo_path"],'latest'))
+
+    print buildcmd
+    with open(os.path.join(builddir, 'run'), 'w') as fcmdl:
+        fcmdl.write('%s' % buildcmd)
+
+    os.chmod(os.path.join(builddir, 'run'), 0777)
+
+    print 'starting BUILD inside VM to create sdk public rootstrap'
+    sys.stdout.flush()
+
+    ret = run_inside_vm(vm_image, int(os.getenv("RBS_VM_MEMORY", 8492)),
+                        int(os.getenv("RBS_VM_CPUS", 4)), basedir)
+    # workaround for qemu/9p bug in mapping permissions
+    set_permissions(builddir, (0644, 0755))
+
+    status = 'success'
+
+    if int(ret) != 0:
+        print 'Error: sdk rootstrap returned %s' % ret
+        status = 'failed'
+
+    # sync sdk rootstrap image to SYNC_DEST
+    if os.getenv('IMG_SYNC_DEST_BASE'):
+        sync_to_download(os.path.join(builddir, 'rbs_out'),
+                         os.path.join(os.getenv('IMG_SYNC_DEST_BASE'),
+                         profile['repo_path'],build_id,'builddata', rootstrap),
+                         os.path.join(builddir, 'rs.log'))
+
+    #sync to RSYNC_SNAPSHOT
+    # check to folder
+    if os.getenv('RSYNC_SNAPSHOT'):
+        sync_to_download(os.path.join(builddir, 'rbs_out'),
+                         os.path.join(os.getenv('RSYNC_SNAPSHOT'),
+                         profile['repo_path'],build_id,'builddata', rootstrap),
+                         os.path.join(builddir, 'rs.log'))
+
+    if os.getenv('IMG_SYNC_DEST_BASE') and os.getenv('RSYNC_SNAPSHOT'):
+        origin_IP = re.match(r'.*:\/\/([0-9.]{1,})\/.*', os.getenv('URL_PUBLIC_REPO_BASE')).group(1)
+        copy_IP = re.match(r'.*:\/\/([0-9.]{1,})\/.*', os.getenv('RSYNC_SNAPSHOT')).group(1)
+
+        snapshot_url = os.path.join(profile["base_url"].replace(origin_IP,copy_IP),profile["repo_path"],build_id)
+        basesnapshot_url = os.path.join(profile["base_url"].replace(origin_IP,copy_IP),profile["base_repo_path"],'latest')
+        sdkrootstrap_url = os.path.join(snapshot_url.replace(origin_IP,copy_IP), 'builddata', rootstrap)
+    else:
+        snapshot_url = os.path.join(profile["base_url"],profile["repo_path"],build_id)
+        basesnapshot_url = os.path.join(profile["base_url"],profile["base_repo_path"],'latest')
+        sdkrootstrap_url = os.path.join(snapshot_url, 'builddata', rootstrap)
+
+    if status == 'failed':
+        # Notify email to RBS_MAILINGLIST
+        title = EMAIL_TITLE % ( profile["project"], status )
+        msg = EMAIL_COUNT_MESSAGE % (profile["project"],
+                                     snapshot_url,
+                                     basesnapshot_url,
+                                     sdkrootstrap_url,
+                                     status)
+        receiver = {'author': 'RBS Administrator',
+                    'email': os.getenv('RBS_MAILINGLIST')}
+        if receiver:
+            send_mail(title, msg, receiver)
+
+
+    return status
+
+def main():
+    """The main body"""
+
+    # Check if environment variables are set
+    for var in ('WORKSPACE', 'IMG_SYNC_DEST_BASE'):
+        if not os.getenv(var):
+            print 'Error: environment variable %s is not set' % var
+            return -1
+
+    fields = trigger_info(os.getenv('TRIGGER_INFO'))
+    base_path = os.getenv('PATH_REPO_BASE')
+
+    # Check if we've got required fields in TRIGGER_INFO
+    for field in ('build_id', 'project'):
+        if field not in fields:
+            print 'Error: TRIGGER_INFO doesn\'t contain %s' % field
+            return -1
+
+    profile = setup_profile(fields['project'])
+    if not profile:
+        print 'Skip TRIGGER job for the project %s' % fields['project']
+        return 0
+
+    vm_image = os.getenv("RBS_VM_IMAGE")
+
+    if not vm_image:
+       print 'VM not vm_image'
+       return -1
+
+    if not os.access(vm_image, os.R_OK):
+       print 'VM not access'
+       return -1
+
+    # check if tarball exists
+    if not vm_image or not os.access(vm_image, os.R_OK):
+        print 'VM image %s is not found' % vm_image
+        return -1
+
+    # clone git sdk-image
+    git_prj = profile['git_rs_prj']
+
+    git_cache = os.getenv("GIT_CACHE_DIR")
+    print 'Cloning %s' % git_prj
+    if not clone_gitproject(git_prj, \
+            os.path.join(git_cache, git_prj)):
+        raise LocalError('Error cloning %s' % git_prj)
+
+    mygit = Git(os.path.join(git_cache, git_prj))
+
+    #### Make Rootstrap Snapshot (PRIVATE) ####
+    status = make_rs_snapshot_private(git_prj, git_cache, mygit, fields, profile, vm_image)
+
+    if status == 'success':
+        print "The RBS(Internal) build was successful."
+    else:
+        print "The RBS(Internal) build failed."
+
+    #### Make Rootstrap Snapshot (PUBLIC) ####
+    status = make_rs_snapshot_public(git_prj, git_cache, mygit, fields, profile, vm_image)
+
+    if status == 'success':
+        print "The RBS(Public) build was successful."
+    else:
+        print "The RBS(Public) build failed."
+
+    return 0
+
+if __name__ == "__main__":
+    sys.exit(main())
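
For reference, a hypothetical TRIGGER_INFO payload that would drive this job; only project and build_id are required, and project must match one of the profiles defined at the top of the file. The build_id value is an invented example:

import json
import base64

fields = {'project': 'Tizen:Unified',
          'build_id': 'tizen-unified_20170602.1'}   # example build id format
print 'TRIGGER_INFO=%s' % base64.b64encode(json.dumps(fields))
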
index f1cebe0..05eb230 100755 (executable)
@@ -330,8 +330,17 @@ def main():
     data = repo_data.copy()
     # remove unused item
     data.pop('imagedata')
-
-    trigger_next("post-snapshot", data)
+    parm_backend = {}
+    for bknd in ['BACKEND_02', 'BACKEND_01']:
+        if os.getenv('%s_REGEX' % bknd) and \
+            re.search(r'%s' % os.getenv('%s_REGEX' % bknd), data['project']) is not None:
+            parm_backend['BACKEND_SELECTION'] = bknd
+            break
+    trigger_next("post-snapshot", data, extra_params=parm_backend)
+
+    # TRIGGER NEXT RSYNC-DOWNLOAD (MIRROR)
+    if os.getenv("RSYNC_DOWNLOAD_ENABLED", "0") != "0":
+        trigger_next('RSYNC_DOWNLOAD', data)
 
     # Update NUGET pacakges to server
     if os.getenv("NUGET_UPDATE_ENABLE","0") == "1":
@@ -343,16 +352,7 @@ def main():
 
     if buildmonitor_enabled:
         bm_end_datetime = datetime.datetime.now()
-        # for sr_stage & build_snapshot
-        #buildmonitor.end_create_snapshot_create_images_for_sr_stage(project,
-        #                                                            bm_start_datetime,
-        #                                                            bm_end_datetime)
         bm_snapshot_url = os.path.join(base_url, repo_data['repo_path'])
-        #buildmonitor.end_create_snapshot_for_post_build_snapshot(project,
-        #                                                         bm_start_datetime,
-        #                                                         bm_end_datetime,
-        #                                                         bm_snapshot_url,
-        #                                                         repo_data['build_id'])
         bm_stage = 'Post_Snap_End'
         bm_data = {"bm_stage" : bm_stage,
                    "bm_start_datetime" : str(bm_start_datetime),
diff --git a/job_find_incorrect_filenames.py b/job_find_incorrect_filenames.py
new file mode 100755 (executable)
index 0000000..3c4aa78
--- /dev/null
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+import os
+import re
+import sys
+import time
+import requests
+import logging
+import subprocess
+from bs4 import BeautifulSoup
+from xml.etree.ElementTree import fromstring
+#from gevent.monkey import patch_all; patch_all()
+#from gevent.pool import Pool
+
+
+name, manifest_url, git_cache_home, regex = os.getenv('PROFILE').split(';')
+
+gerrit_username = os.getenv('GERRIT_USERNAME')
+gerrit_hostname = os.getenv('GERRIT_HOSTNAME')
+gerrit_sshport = os.getenv('GERRIT_SSHPORT')
+workspace = os.getenv('WORKSPACE')
+
+
+def init_logger():
+    logger = logging.getLogger()
+    console_handler = logging.StreamHandler()
+    logger.addHandler(console_handler)
+    logger.setLevel(logging.DEBUG)
+
+
+def init_git_cache_home():
+    if not os.path.exists(git_cache_home):
+        os.mkdir(git_cache_home)
+
+
+def init():
+    init_git_cache_home()
+    init_logger()
+
+
+def clone_all_projects(projects):
+    # blocking
+    for project in projects:
+        clone_project(project)
+
+    # gevent
+    #pool = Pool(8)
+    #pool.map(clone_project, projects)
+
+
+def clone_project(project):
+    """docstring for clone_project"""
+
+    gerrit_path, revision = project
+    gerrit_url = 'ssh://{0}@{1}:{2}/{3}'.format(gerrit_username,
+                                                gerrit_hostname,
+                                                gerrit_sshport,
+                                                gerrit_path)
+
+    git_cache_dir = '{0}/{1}'.format(git_cache_home, gerrit_path)
+    logging.debug(gerrit_path)
+
+    cmd = 'git clone {0} {1}'.format(gerrit_url, git_cache_dir)
+    # if cache exists, just fetch
+    if os.path.exists(git_cache_dir):
+        cmd = 'git fetch'
+        os.chdir(git_cache_dir)
+
+    logging.debug(cmd)
+    subprocess.call(cmd, shell=True)
+
+    # checkout revision
+    os.chdir(git_cache_dir)
+    subprocess.call('git checkout {0}'.format(revision).split())
+    os.chdir(workspace)
+
+
+def get_text_from_url(url, timeout=10, max_retry=10):
+    """docstring for get_text_from_url"""
+
+    for loop in range(max_retry):
+        try:
+            r = requests.get(url,
+                             timeout=timeout)
+            if r.status_code == 200:
+                break
+            time.sleep(0.5)
+        except requests.exceptions.Timeout as e:
+            logging.debug(e)
+            continue
+        except requests.exceptions.ConnectionError as e:
+            logging.debug(e)
+            continue
+        except Exception as e:
+            logging.debug(e)
+            raise Exception('Can\'t access url : {0}'.format(url))
+    else:
+        raise Exception('Can\'t access url : {0}'.format(url))
+    return r.text
+
+
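
get_text_from_url() above uses Python's for/else retry idiom: break leaves the loop on success, and the else clause runs only when all max_retry iterations finish without a break, which is exactly the everything-failed case. The idiom in isolation:

import time

def retry(action, max_retry=10):
    for _ in range(max_retry):
        if action():
            break              # success: the else clause is skipped
        time.sleep(0.5)
    else:
        # reached only when no iteration hit 'break'
        raise Exception('all %d attempts failed' % max_retry)
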
+def get_manifests(manifest_url):
+    """docstring for get_manifests"""
+    if not manifest_url.endswith('/'):
+        manifest_url += '/'
+    logging.debug(manifest_url)
+
+    text = get_text_from_url(manifest_url)
+    soup = BeautifulSoup(text, 'html.parser')
+
+    file_urls = []
+    for loop in soup.findAll('a', attrs={'href': re.compile('xml$')}):
+        file_urls.append('{0}{1}'.format(manifest_url, loop['href']))
+
+    return file_urls
+
+
+def get_gerrit_prj_list(manifests):
+    """docstring for get_gerrit_prj_list"""
+
+    result = []
+    for loop in manifests:
+        text = get_text_from_url(loop)
+        tree = fromstring(text)
+        logging.debug('total found : {}'.format(len(tree.findall('project'))))
+        for project in tree.findall('project'):
+            if project.attrib['path'] and project.attrib['revision']:
+                result.append((project.attrib['path'],
+                               project.attrib['revision']))
+
+    result = list(set(result))
+    logging.debug('total projects : {0}'.format(len(result)))
+    return result
+
+
+def find_all_incorrect_filenames(projects,
+                                 regex='"(.*@.*|.*\.\.\..*)"',
+                                 output_filename='incorrect_filenames.csv'):
+    """docstring for find_all_incorect_filenames"""
+    # for all projects
+    for loop in projects:
+        gerrit_path, revision = loop
+        git_cache_dir = '{0}/{1}'.format(git_cache_home, gerrit_path)
+
+        logging.debug(git_cache_dir)
+        os.chdir(git_cache_dir)
+        cmd = 'find . -regextype posix-extended -regex {0}'.format(regex)
+        outs = subprocess.check_output(cmd, shell=True)
+        os.chdir(workspace)
+        if outs:
+            logging.debug('write {0} for {1}'.format(output_filename,
+                                                     gerrit_path))
+            with open(output_filename, 'a+') as f:
+                output = '\n'.join(['{0},{1}'.format(gerrit_path, out[2:])
+                                    for out in outs.split()])
+                f.writelines(output)
+                f.write('\n')
+
+
+def main():
+    """docstring for main"""
+
+    init()
+
+    start = time.time()
+
+    # get manifest xml files
+    manifests = get_manifests(manifest_url)
+    # get project/commit list
+    projects = get_gerrit_prj_list(manifests)
+
+    # clone all projects
+    clone_all_projects(projects)
+
+    # find all incorrect file names
+    find_all_incorrect_filenames(projects, regex, '{}.csv'.format(name))
+
+    end = time.time() - start
+    print('Elapsed time : {0}'.format(end))
+
+
+if __name__ == '__main__':
+    try:
+        sys.exit(main())
+    except KeyboardInterrupt as e:
+        logging.debug(e)
+        sys.exit(1)
+    except Exception as e:
+        logging.debug(e)
+        sys.exit(1)
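
The job is driven entirely by environment variables; PROFILE packs four semicolon-separated fields. A hypothetical configuration (URL and paths invented for illustration):

import os

# name;manifest_url;git_cache_home;regex
os.environ['PROFILE'] = ';'.join([
    'tizen-unified',                          # report name -> tizen-unified.csv
    'http://download.example.org/snapshots/latest/builddata/manifest',
    '/var/lib/jenkins/git-cache-filenames',
    '"(.*@.*|.*\.\.\..*)"',                   # names containing '@' or '...'
])
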
index 11abb27..e9af3fd 100755 (executable)
@@ -33,11 +33,11 @@ from xml.dom.minidom import parse,parseString
 from collections import defaultdict
 from StringIO import StringIO
 
-
 from random import randint
 
 from common.buildtrigger import trigger_info, trigger_next
-from common.utils import sync, set_permissions, Workdir
+from common.utils import sync, sync_get, set_permissions, Workdir
+
 
 # Returns the list of baseurls from kickstart configuration.
 def get_base_url(kickstart,buildid):
@@ -244,8 +244,8 @@ def main():
             "build_id": build_id,
             "repo": fields['repo'],
             }
-    if "download_num" in fields:
-        data["download_num"] = int(fields['download_num'])
+    if 'download_num' in fields:
+        data['download_num'] = int(fields['download_num'])
     trigger_next("POST-IMAGE-CREATION", data)
 
     ksf = os.path.join(outdir, '%s.ks' % name)
@@ -327,6 +327,18 @@ def main():
     sync_status = 'success'
 
     for loop in range(2):
+        #TODO: If download_num differs from note, do not proceed!
+        if 'download_num' in fields:
+            try:
+                if 0 == sync_get(os.path.join(sync_dest, 'buildinfo.in'), 'tmp_download_num'):
+                    H = dict(line.strip().split('=') for line in open('tmp_download_num'))
+                    print H
+                    if int(H['download_num']) != int(fields['download_num']):
+                        print 'DO NOT sync! download_num differs: %s, %s' \
+                              % (fields['download_num'], H['download_num'])
+                        break
+            except Exception, err:
+                print 'Error while reading note! %s' % str(err)
         if sync(sync_src, sync_dest):
             print "Retry sync %s to %s" % (sync_src, sync_dest)
             sync_status = 'failed'
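
This guard pairs with the buildinfo.in note written by job_pre_release_obs.py later in this diff: the pre-release job records the download_num it assigned, and the imager skips the rsync when its trigger carries a different number, i.e. the image belongs to a superseded prerelease build. A worked example of the note format and the parse used above:

# buildinfo.in is a simple key=value note, e.g. the single line:
#     download_num=3
lines = ['download_num=3\n']
H = dict(line.strip().split('=') for line in lines)
assert int(H['download_num']) == 3   # compared against fields['download_num']
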
@@ -351,8 +363,8 @@ def main():
             "build_id": build_id,
             "repo": fields['repo'],
             }
-    if "download_num" in fields:
-        data["download_num"] = int(fields['download_num'])
+    if 'download_num' in fields:
+        data['download_num'] = int(fields['download_num'])
 
     trigger_next("POST-IMAGE-CREATION", data)
 
diff --git a/job_importrpm_obs.py b/job_importrpm_obs.py
new file mode 100644 (file)
index 0000000..e07713a
--- /dev/null
@@ -0,0 +1,336 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+import os
+import re
+import sys
+import json
+import subprocess
+import gzip
+import shutil
+from datetime import datetime
+from common.buildtrigger import trigger_info, trigger_next
+from common.utils import xml_to_obj
+from common.buildservice import BuildService
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+class trigger_for_sync_obs(object):
+
+    profile = None
+    profile_basedir = None
+    profile_repos = {}
+
+    def setup_profile(self, name, project):
+
+        # set up profile
+        self.profile = {'name': name, \
+                        'refprj': project, \
+                        'dest_dir': '/srv/obs/build', \
+                        'option': 1}
+
+        # prepare dir
+        basedir = os.path.join(self.profile['dest_dir'], self.profile['refprj'])
+        if os.path.exists(basedir):
+            #shutil.rmtree(basedir)
+            print "%s exists" % basedir
+        else:
+            print "%s does not exist" % basedir
+            return False
+        self.profile_basedir = basedir
+        #os.mkdirs(basedir)
+        repo_dirs = {}
+        repo_dirs['repo'] = [ (repo) for repo in os.listdir(basedir) if os.path.isdir(os.path.join(basedir, repo))]
+        for repo_dir in repo_dirs['repo']:
+            repo_dirs[repo_dir] = [ (arch_dir, os.path.join(repo_dir, arch_dir)) \
+                                    for arch_dir in os.listdir(os.path.join(basedir,repo_dir)) \
+                                        if os.path.isdir(os.path.join(basedir,repo_dir, arch_dir)) ]
+
+        self.profile_repos = repo_dirs
+        for repo in self.profile_repos['repo']:
+            for arch , path in self.profile_repos[repo]:
+                print 'repo = %s , arch = %s, path = %s' %(repo, arch, path)
+
+        #self.profile['repoarch']
+        print 'project = %s' % (project)
+        sys.stdout.flush()
+
+        return self.profile
+
+    def execute_shell(self, cmd, progress=False):
+        print "[INFO] command : %s" % cmd
+        proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        if progress:
+            line_iterator = iter(proc.stdout.readline, b"")
+            for line in line_iterator:
+                print "    %s" % line[:-1]
+        out, err = proc.communicate()
+        if cmd.startswith("rsync"):
+            if err:
+                print "stderr: %s" % err
+                return 'err'
+
+        if err:
+            print "stderr: %s" % err
+            return None
+
+        o = out.strip().split('\n')
+        print "o: %s" % o
+        if len(o) == 1:
+            if o[0] == '':
+                return None
+        return o
+
+    def copy_rsync_rpm(self, repospath, archfilelists, dstdir):
+        """
+        """
+        file_name="/srv/obs/build/_filelist"
+        content = ""
+
+        if os.path.isfile(file_name):
+            os.remove(file_name)
+
+        with open(file_name, 'w') as f:
+            for filepath in archfilelists:
+                f.write(os.path.join(repospath,filepath)+'\n')
+        cmd = "cat %s | " %(file_name)
+        cmd += "xargs -n 1 -P 8 -I% rsync -avz --bwlimit=5120000 % "
+        cmd += "%s/" %(dstdir)
+        print 'cmd = %s' %(cmd)
+        subprocess.call(cmd, shell=True)
+
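
copy_rsync_rpm() fans the copy out over up to eight concurrent rsync processes: the file list goes to _filelist, and xargs -n 1 -P 8 -I% launches one rsync per listed path. A sketch of what the pipeline expands to for a hypothetical two-entry list (paths invented):

# cat /srv/obs/build/_filelist | xargs -n 1 -P 8 -I% \
#     rsync -avz --bwlimit=5120000 % <dstdir>/
#
# behaves like running, concurrently, one rsync per listed file:
#     rsync -avz --bwlimit=5120000 <repospath>/x86_64/foo-1.0.rpm <dstdir>/
#     rsync -avz --bwlimit=5120000 <repospath>/x86_64/bar-2.0.rpm <dstdir>/
# --bwlimit is in KiB/s, so 5120000 effectively leaves each copy uncapped.
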
+    def construct_srv_obs_build_project_repo_package(self, rsyncd, snapshotdir):
+        """
+        """
+        def clear_primary_files(self):
+            for repo in self.profile_repos['repo']:
+                dstdir = os.path.join(self.profile_basedir, repo)
+                primarymd = os.path.join(dstdir, \
+                                         [ x for x in os.listdir(dstdir) \
+                                               if x.endswith('-primary.xml.gz') ][0])
+                os.remove(primarymd)
+
+        print "\n\n3) RSYC DOWNLOAD PRIMARY AND DEPENDS XML for %s at %s " % (self.profile['name'], datetime.now())
+        for repo in self.profile_repos['repo']:
+            repodata_path = os.path.join(rsyncd,snapshotdir,"repos",repo,"packages","repodata")
+            cmd = "rsync %s/ --list-only --include='%s' --exclude='*'" \
+                  " | awk '{ print $5; }' | grep '.xml.gz' " \
+                  % (repodata_path, "*-primary.xml.gz")
+            primarylist = self.execute_shell(cmd)
+            if primarylist and type(primarylist) == list:
+                dstdir =  os.path.join(self.profile_basedir, repo)
+                self.copy_rsync_rpm(repodata_path, primarylist, dstdir)
+            else:
+                print 'No primary repodata found'
+                clear_primary_files(self)
+                return 3
+
+        depends_path = os.path.join(rsyncd,snapshotdir,"builddata","depends")
+        cmd = "rsync %s/ --list-only --include='%s' --exclude='*'" \
+              " | awk '{ print $5; }' | grep '_revpkgdepends.xml' " \
+              % (depends_path, "*_revpkgdepends.xml")
+        deplist = self.execute_shell(cmd)
+        if deplist and type(deplist) == list:
+            dstdir =  os.path.join(self.profile_basedir)
+            self.copy_rsync_rpm(depends_path, deplist, dstdir)
+        else:
+            clear_primary_files(self)
+            print 'No depends list found'
+            return 4
+
+        print "\n\n4) GENERATE PACKAGE RPM LIST for %s at %s " % (self.profile['name'], datetime.now())
+        for repo in self.profile_repos['repo']:
+            src_bin_map = {}
+            dstdir = os.path.join(self.profile_basedir, repo)
+            primarymd = os.path.join(dstdir, \
+                                     [ x for x in os.listdir(dstdir) \
+                                           if x.endswith('-primary.xml.gz') ][0])
+            # Read primary repo metadata
+            primary = xml_to_obj(gzip.open(os.path.join(dstdir, primarymd)))
+            print 'Removing primarymd %s' % primarymd
+            os.remove(primarymd)
+            for package in primary.package:
+                spec_name = re.search(r'(.*)-(.*)-(.*).src.rpm', package.format.sourcerpm).groups()[0]
+                if spec_name in src_bin_map:
+                    src_bin_map[spec_name].append(package.location.href)
+                else:
+                    src_bin_map[spec_name] = [package.location.href]
+            # Read builddep info
+            for _file in [ x for x in os.listdir(self.profile_basedir) \
+                               if repo in x and x.endswith('_revpkgdepends.xml') ]:
+                with open(os.path.join(self.profile_basedir, _file)) as df:
+                    depends = xml_to_obj(''.join(df.readlines()))
+                    if not depends or not depends.package:
+                        continue
+                    for package in depends.package:
+                        if package.source in src_bin_map and package.source != package.name:
+                            src_bin_map[package.name] = src_bin_map.pop(package.source)
+            print '\nRemoving garbage buildepinfo files'
+            for f in os.listdir(self.profile_basedir):
+                if re.search('.*_%s_.*_revpkgdepends.xml' % repo, f):
+                    os.remove(os.path.join(self.profile_basedir, f))
+
+            print "\n\n5) HARD LINK PACKAGE RPMS for %s at %s" % (repo, datetime.now())
+            target_arch = [ x for x in os.listdir(dstdir) if os.path.isdir(os.path.join(dstdir, x)) ]
+            #TODO: arch hack
+            if len(target_arch) != 1:
+                if 'i586' in target_arch:
+                    target_arch.remove('i586')
+            if len(target_arch) != 1:
+                if 'x86_64' in target_arch:
+                    target_arch.remove('x86_64')
+            print '  * %s... Start make link, target_arch : %s' % (repo, target_arch)
+            count = 0
+            for pkg_name in src_bin_map:
+                for y in src_bin_map[pkg_name]:
+                    arch, rpm = y.split('/')
+                    #TODO: i686 -> i586, noarch -> target_arch
+                    arch = arch.replace('i686', 'i586').replace('noarch', target_arch[0])
+                    src_file = os.path.join(dstdir,arch,':full', rpm)
+                    pkg_dir = os.path.join(dstdir,arch,pkg_name)
+                    dst_file = os.path.join(pkg_dir, rpm)
+                    if not os.path.exists(src_file):
+                        print '  not exist... %s, %s' % (src_file, y)
+                        raise LocalError('missing rpms')
+                    # Link rpms... /repo/arch/:full/*.rpm -> /repo/arch/pkg/*.rpm
+                    if not os.path.exists(pkg_dir):
+                        os.makedirs(pkg_dir)
+                    if os.path.exists(dst_file):
+                        os.remove(dst_file)
+                    os.link(src_file, dst_file)
+                    count += 1
+            print '    Total %d rpms linked at %s' % (count, datetime.now())
+            sys.stdout.flush()
+        print '\n\n'
+
+    def run_importrpm_obs(self, action=None):
+
+        print '---[JOB STARTED]-------------------------'
+
+        fields = trigger_info(os.getenv("TRIGGER_INFO"))
+
+        # Check if we've got required fields in TRIGGER_INFO
+        for field in ('profile', 'target', 'project', 'build_id', 'snapshotdir'):
+            if field not in fields:
+                print 'Error: TRIGGER_INFO doesn\'t contain %s' % field
+                return -1
+
+        self.profile = self.setup_profile(fields['profile'], fields['target'])
+        if not self.profile:
+            print 'Skip Sync OBS project %s' % fields['project']
+            return 0
+
+        buildid = fields['build_id'].split('_')[1]
+        snapshotdir = fields['snapshotdir']
+        rsyncd = os.getenv('IMG_SYNC_DEST_BASE')
+        print 'rsyncd = %s snapshotdir = %s ' %(rsyncd, snapshotdir)
+        if not rsyncd or not snapshotdir or not buildid:
+            print "Please check the rsync base, snapshot dir and build id. Skipping!"
+            return
+
+        print "1) Init a rpm files from download server "
+        for repo in self.profile_repos['repo']:
+
+            rpath = os.path.join(rsyncd,snapshotdir,"repos",repo)
+            repospath = os.path.join(rsyncd,snapshotdir,"repos")
+            noarchstring = "*.noarch.rpm"
+
+            print "1-1) repo = %s" %(repo)
+            for arch , path in self.profile_repos[repo]:
+                print "1-2) repo = %s , arch = %s" %(repo,arch)
+                rescan_after_delete = False
+                for del_x in os.listdir(os.path.join(self.profile_basedir, repo, arch)):
+                    del_path = os.path.join(self.profile_basedir, repo, arch, del_x)
+                    if os.path.isdir(del_path):
+                        shutil.rmtree(del_path)
+                        rescan_after_delete = True
+                if rescan_after_delete:
+                    del_cmd = "obs_admin --rescan-repository %s %s %s " \
+                               %(self.profile['refprj'],repo,arch)
+                    subprocess.call(del_cmd, shell=True)
+
+                archstring = "*.%s.rpm" % (arch)
+                #print rpath
+                cmd = "rsync %s -r --list-only --include='%s' --include='*/' --exclude='*'" \
+                      " | awk '{ print $5; }' | grep '.rpm' " \
+                      % (rpath , archstring)
+                archfilelists = self.execute_shell(cmd)
+                if arch == 'i586':
+                    print 'add i686 arch'
+                    cmd = "rsync %s -r --list-only --include='%s' --include='*/' --exclude='*'" \
+                          " | awk '{ print $5; }' | grep '.rpm' " \
+                          % (rpath , "*.i686.rpm")
+                    #print cmd
+                    extrafilelists = self.execute_shell(cmd)
+                    if extrafilelists:
+                        if archfilelists:
+                            archfilelists += extrafilelists
+                        else:
+                            archfilelists = extrafilelists
+
+                # make dir
+                dstdir = os.path.join(self.profile_basedir, repo, arch,":full")
+                #print dstdir
+                if not os.path.exists(dstdir):
+                    os.mkdir(dstdir)
+
+                # Copy arch rpm binary
+                print "1-4) Copy arch rpm binary "
+                if archfilelists:
+                    self.copy_rsync_rpm(repospath, archfilelists, dstdir)
+
+                # search noarch list
+                cmd = "rsync %s -r --list-only --include='%s' --include='*/' --exclude='*'" \
+                      " | awk '{ print $5; }' | grep '.rpm' " \
+                      % (rpath , noarchstring)
+                #print cmd
+                noarchfilelists = self.execute_shell(cmd)
+
+                # Copy noarch rpm binary
+                print "1-6) Copy noarch rpm binary "
+                if noarchfilelists:
+                    self.copy_rsync_rpm(repospath, noarchfilelists, dstdir)
+
+        # obs-admin rescan
+        print "2) obs_admin --rescan-repository "
+        for repo in self.profile_repos['repo']:
+            for arch , path in self.profile_repos[repo]:
+
+                cmd = "obs_admin --rescan-repository %s %s %s " \
+                      %(self.profile['refprj'],repo,arch)
+                subprocess.call(cmd, shell=True)
+
+        # Link rpms from :full to each package directories
+        #self.construct_srv_obs_build_project_repo_package(rsyncd, snapshotdir)
+
+    def main(self, action):
+
+        print('-----[JOB STARTED: importrpm_for_obs  ]-----')
+        #args = self.parse_args()
+        #profile = self.setup_profile_from_args(args)
+
+        self.run_importrpm_obs(action[1] if len(action) == 2 else None)
+
+if __name__ == '__main__':
+    try:
+        trigger = trigger_for_sync_obs()
+        sys.exit(trigger.main(sys.argv))
+    except Exception as e:
+        print(e)
+        sys.exit(1)
+
+
index 2059440..9374383 100755 (executable)
@@ -29,8 +29,18 @@ from common.buildservice import BuildService
 from common.backenddb import BackendDB
 from common.snapshot import snapshot_project_enabled
 from common.prerelease import is_prerelease_project
+from common.trbs import is_trbs_project
+
 def isReadyForPrerelease(build, project):
+    """
+    isReadyForPrerelease
+    """
+    return True
 
+def isReadyForTrbs(build, project):
+    """
+    isReadyForTrbs
+    """
     return True
 
 def isReadyForCreateSnapshot(build, backenddb, project):
@@ -62,6 +72,11 @@ def main(obs_event_fields):
                 print 'All the repositories are published for project %s.' \
                       'Triggering the prerelease.' % (project)
                 trigger_next('#PRERELEASE#%s#%s' %(project,event_type),obs_event_fields)
+        elif(is_trbs_project(project)):
+            if(isReadyForTrbs(build, project)):
+                print 'All the repositories are published for project %s.' \
+                      'Triggering the trbs.' % (project)
+                trigger_next('#TRBS#%s#%s' %(project,event_type),obs_event_fields)
         else:
             if(isReadyForCreateSnapshot(build, backenddb, project)):
                 print 'All the repositories are published for project %s.' \
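
is_trbs_project() comes from the new common/trbs.py introduced by this commit; its contents are not shown here. By analogy with common.prerelease.is_prerelease_project, a plausible shape is a project-name prefix check. A hedged sketch only, with an assumed prefix; the real rule lives in common/trbs.py:

def is_trbs_project_sketch(project, prefix='home:trbs:'):
    # Assumption: TRBS projects carry a dedicated name prefix, mirroring
    # how prerelease projects are identified by theirs.
    return project.startswith(prefix)
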
index ee82b6a..e02be08 100755 (executable)
@@ -41,14 +41,35 @@ def load_repos(config_file):
 
     bdb = BackendDB(os.getenv('REDIS_HOST'),
                     int(os.getenv('REDIS_PORT')))
-    return bdb.read_repos(file(config_file, 'r').read())
+    ret = bdb.read_repos(file(config_file, 'r').read())
+
+    # Print db contents
+    repos = bdb.get_repos()
+    for x in repos:
+        print '\n%s\n----' % x
+        for k in repos[x]:
+            print '  %s: %s' % (k, repos[x][k])
+
+    return ret
 
 def main():
     """Script entry point.
     """
 
     print '---[JOB STARTED: %s ]-------------------------'
-    event = get_gerrit_event()
+
+    if os.getenv('GERRIT_EVENT_TYPE'):
+        event = get_gerrit_event()
+    elif os.getenv('GIT_COMMIT'):
+        print '\n\nBuilding data from %s\n (%s/%s)\n' \
+              % (os.getenv('GIT_URL'), os.getenv('GIT_BRANCH'), os.getenv('GIT_COMMIT'))
+        # organization/git_path
+        git_project = os.getenv('GIT_URL').split(':')[-1].split('.git')[0]
+        if git_project == os.getenv('REPOSYAML_PRJ_GITHUB'):
+            return load_repos('repos.yaml')
+        # Unknown git project; don't fall through to the gerrit path below,
+        # where 'event' would be undefined.
+        print 'Unknown git project %s. Stop building redis db!' % git_project
+        return -1
+    else:
+        print 'Illegal trigger. Stop building redis db!'
+        return -1
 
     if event['project'] == os.getenv('REPOSYAML_PRJ'):
         # prepare separate temp directory for each build
@@ -72,3 +93,4 @@ def main():
 
 if __name__ == '__main__':
     sys.exit(main())
+
index b1f9a3e..c9db9ee 100755 (executable)
@@ -367,6 +367,13 @@ def make_repo(project, repo, backenddb, base_url, base_path,
                        'download_num': int(1)},
                        project)
 
+    #TODO: IMAGER wants to read download_num so it can decide to give up the rsync operation.
+    try:
+        with open(os.path.join(prerelease.path, prerelease.build_id, 'buildinfo.in'), 'w') as df:
+            df.write('download_num=%d\n' % current_download_num)
+    except Exception, err:
+        print 'failed to update download_num note. %s' % str(err)
+
     return {'project': project,
             'repo' : repos,
             'repo_path': os.path.join(prerelease.dir, prerelease.build_id),
@@ -533,8 +540,13 @@ def main(action):
         data = repo_data.copy()
         # remove unused item
         data.pop('imagedata')
-
-        trigger_next("post-snapshot", data)
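+        # Pick the backend whose BACKEND_XX_REGEX matches the project name;
+        # BACKEND_02 takes precedence over BACKEND_01.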
+        parm_backend = {}
+        for bknd in ['BACKEND_02', 'BACKEND_01']:
+            if os.getenv('%s_REGEX' % bknd) and \
+                re.search(r'%s' % os.getenv('%s_REGEX' % bknd), data['project']) is not None:
+                parm_backend['BACKEND_SELECTION'] = bknd
+                break
+        trigger_next("post-snapshot", data, extra_params=parm_backend)
     elif action == 'cleanup':
         # request(SR) end of life; this function should be called to
         # delete the prerelease project
diff --git a/job_rsync_download.py b/job_rsync_download.py
new file mode 100755 (executable)
index 0000000..98b78ff
--- /dev/null
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+import sys
+import os
+import re
+import errno
+import subprocess
+
+import shutil, tempfile, atexit
+
+from common.buildtrigger import trigger_info, trigger_next
+from common.utils import make_latest_link
+
+def get_file_info(path):
+    """Return the owner uid and group gid of path."""
+    st = os.stat(path)
+    return {'owner': st.st_uid,
+            'group': st.st_gid}
+
+def create_directory(path):
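+    """mkdir -p equivalent; Python 2 os.makedirs has no exist_ok flag."""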
+    try:
+        os.makedirs(path)
+    except OSError as exc: # Python >2.5
+        if exc.errno == errno.EEXIST and os.path.isdir(path):
+            pass
+        else: raise
+
+def execute_shell(cmd, progress=False):
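+    """Run cmd in a shell. Returns 'err' if an rsync command wrote to stderr,
+    None on error or empty output, otherwise stdout split into a list of lines."""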
+    print "[INFO] command : %s" % cmd
+    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    if progress:
+        line_iterator = iter(proc.stdout.readline, b"")
+        for line in line_iterator:
+            print "    %s" % line[:-1]
+    out, err = proc.communicate()
+    if cmd.startswith("rsync"):
+        if err:
+            print "stderr: %s" % err
+            return 'err'
+
+    if err:
+        print "stderr: %s" % err
+        return None
+
+    o = out.strip().split('\n')
+    print "o: %s" % o
+    if len(o) == 1:
+        if o[0] == '':
+            return None
+    return o
+
+def main():
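+    """Copy a snapshot into the sync area, rewrite origin IPs in its metadata,
+    and rsync the result to the download server."""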
+
+    print '---[JOB STARTED]-------------------------'
+   
+    content = trigger_info(os.getenv("TRIGGER_INFO"))
+
+    # for prerelease and prebuild
+    if content['repo_path'].find(os.getenv("PATH_REPO_BASE")) == 0:
+        if content['repo_path'].count(os.getenv("PATH_REPO_BASE")) == 1:
+            content['repo_path'] = content['repo_path'].replace(os.getenv("PATH_REPO_BASE"),"")
+    rsync_url = os.path.join(os.getenv('RSYNC_SNAPSHOT'), content['repo_path'])
+    #origin_IP = os.getenv('URL_PUBLIC_REPO_BASE').replace("http://", "")
+    origin_IP = re.match(r'.*:\/\/([0-9.:]{1,}).*', os.getenv('URL_PUBLIC_REPO_BASE')).group(1)
+    copy_IP = re.match(r'.*:\/\/([0-9.]{1,}).*', rsync_url).group(1)
+    
+    copy_repo_path = os.path.join(os.getenv('PATH_REPO_COPY'), content['repo_path'])
+    origin_repo_path = os.path.join(os.getenv('PATH_REPO_BASE'), content['repo_path'])
+
+    if not os.path.isdir(copy_repo_path):
+        create_directory(copy_repo_path)
+        if not execute_shell( "cp -lr %s %s" % (origin_repo_path, os.path.dirname(copy_repo_path))):
+            print "[INFO] Done. Copy to sync directory."
+    else:
+        print "[WARING] Alreadly copy snapshot. %s" % copy_repo_path
+        if not execute_shell( "cp -lrf %s %s" % (origin_repo_path, os.path.dirname(copy_repo_path))):
+            print "[INFO] Done. Copy to sync directory."
+
+    #print "[INFO] Create latest link. %s" % copy_repo_path
+    #make_latest_link(copy_repo_path)
+
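+    # Rewrite the origin server IP to the mirror IP inside image/report
+    # metadata files, preserving each file's owner and group.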
+    for target_dir in ('builddata/images', 'builddata/reports', 'images'):
+        have_regex_file = execute_shell("find %s -name '*' ! -name '*.log' | xargs grep -l '%s' 2>/dev/null" % \
+                                         (os.path.join(copy_repo_path, target_dir), origin_IP) )
+        
+        if not have_regex_file:
+            print "[WARNING] Can't find matching files. It's not a formal snapshot."
+            continue
+        for file in have_regex_file:
+            target_file = os.path.join(copy_repo_path, target_dir, file)
+            print target_file
+            file_info = get_file_info(target_file)
+            with open(target_file, 'r') as f:
+                c = f.read()
+                copy_c = c.replace(origin_IP, copy_IP)
+
+            if os.path.isfile(target_file):
+                os.unlink(target_file)
+
+            with open(target_file, 'w+') as nf:
+                nf.write(copy_c)
+            os.chown(target_file, file_info['owner'], file_info['group'])
+            print '[INFO] Modify file to %s' % target_file
+
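+    # Push the copy to the download server; if rsync fails, retry one
+    # directory level up before reporting failure.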
+    cmd = 'rsync --compress --stats --archive --recursive --hard-links --delete --verbose --exclude="source" %s/ %s/' \
+             % (os.path.dirname(copy_repo_path), os.path.dirname(rsync_url))
+    print "rsync command: %s" % cmd
+    if execute_shell(cmd, progress=True) == 'err':
+        cmd = 'rsync --compress --stats --archive --recursive --hard-links --delete --verbose --exclude="source" %s/ %s/' \
+             % (os.path.dirname(os.path.dirname(copy_repo_path)), os.path.dirname(os.path.dirname(rsync_url)))
+        if execute_shell(cmd, progress=True) == 'err':
+            print "[INFO] Create latest link. %s" % copy_repo_path
+            make_latest_link(copy_repo_path)
+            execute_shell(cmd, progress=True)
+            trigger_next("sync_gss_snapshot", {'repo_path' : content['repo_path']})
+            content['repo_path'] = origin_repo_path
+            trigger_next("build_log_parser", content)
+            raise Exception('[Error] rsync failed.')
+
+    print "[INFO] Create latest link. %s" % copy_repo_path
+    make_latest_link(copy_repo_path)
+    execute_shell(cmd, progress=True)
+    
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/job_sync_repo.py b/job_sync_repo.py
new file mode 100755 (executable)
index 0000000..c897670
--- /dev/null
@@ -0,0 +1,303 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+import os
+import re
+import sys
+import time
+import requests
+import subprocess
+import urlparse
+from common.buildtrigger import trigger_info, trigger_next
+import base64
+import json
+
+#from pprint import pprint
+
+
+def x_le_y(x, y):
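+    """Return True if x's per-day build counter (the NN in ...YYYYMMDD.NN) is
+    less than or equal to y's; the date part is not compared."""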
+    pattern = r'tizen[0-9a-zA-Z_\-\.]*[0-9]{8}.([0-9]{1,2})'
+    p = re.compile(pattern)
+    x = int(p.search(x).group(1))
+    y = int(p.search(y).group(1))
+    return x <= y
+
+
+def get_latest_snapshot_version(url, username='', password='', timeout=5):
+    pattern = r'tizen[0-9a-zA-Z_\-\.]*[0-9]{8}.[0-9]{1,2}'
+    p = re.compile(pattern)
+
+    # get data from url
+    for loop in range(10):
+        try:
+            f = requests.get(url,
+                             auth=(username,
+                                   password),
+                             timeout=timeout)
+            if f.status_code == 200:
+                break
+        except requests.exceptions.Timeout as e:
+            print(e)
+            continue
+        except requests.exceptions.ConnectionError as e:
+            print(e)
+            continue
+        except Exception as e:
+            print(e)
+            raise Exception('exception from get_latest_snapshot_version')
+    else:
+        raise Exception('can\'t get latest snapshot version')
+
+    # return snapshot version
+    return p.findall(f.text).pop()
+
+def generate_links(url, ignores=[], username='', password='', timeout=5):
+    """docstring for get_links"""
+
+    # append / at the end of url
+    if not url.endswith('/'):
+        url += '/'
+
+    print('Start {}'.format(url))
+    status_code = 0
+    for loop in range(10):
+        try:
+            resp = requests.get(url,
+                                auth=(username, password),
+                                timeout=timeout)
+            status_code = resp.status_code
+            if status_code == 200:
+                break
+        except requests.exceptions.Timeout as e:
+            print(e)
+            continue
+        except requests.exceptions.ConnectionError as e:
+            print(e)
+            continue
+        except Exception as e:
+            print(e)
+            raise Exception('exception from generate_links')
+    else:
+        raise Exception('Http resp is {}'.format(status_code))
+
+    links = []
+
+    # regex pattern
+    #pattern_links = 'alt=\"\[(TXT|DIR|   )]*\]\".*<a href=\"(.*)\">'
+    pattern_links = '.*<a href=\"(.*)\">'
+    p = re.compile(pattern_links)
+
+    # find all links
+    found_links = p.findall(resp.text)
+    # retrieve dirs and append files at list
+    for link in found_links:
+        sys.stdout.flush()
+        new_url = urlparse.urljoin(url, link)
+        #print(new_url, link[-1], link[-3:-1])
+        if link[-1] == '/' and link[-3:-1] != '..' and not [ignore for ignore in ignores if re.findall(ignore, link)]:
+            links.extend(generate_links(new_url, ignores, username, password, timeout))
+        elif link[-1] != '/' and not [ignore for ignore in ignores if re.findall(ignore, link)]:
+            links.append(new_url)
+    print('Done {}'.format(url))
+    return links
+
+
+def generate_aria2_input(filename, links):
+    """docstring for generate_aria2_inputs"""
+    pattern = 'tizen[0-9a-zA-Z_\-\.]*[0-9]{8}.[0-9]{1,2}.*/'
+    p = re.compile(pattern)
+
+    with open(filename, 'w') as f:
+        for link in links:
+            f.write('{}\n'.format(link))
+            try:
+                dirs = '  dir={}\n'.format(p.search(link).group(0)[:-1])
+            except Exception as err:
+                # skip links that carry no snapshot directory component
+                print 'Exception... %s not parsed... (%s)' % (link, str(err))
+                continue
+            f.write(dirs)
+
+def remote_jenkins_build_job( url, username, password, jobname, token, data=None):
+    """ remote jenkins build job"""
+    print 'Triggering remote jenkins build job'
+    if url and username and password and jobname:
+        url = '%s/job/%s/buildWithParameters?token=%s' \
+              %(url, jobname, token)
+        try:
+            resp = requests.post(url, params=data,
+                                auth=(username, password)
+                                )
+            status_code = resp.status_code
+            print status_code
+        except requests.exceptions.Timeout as e:
+            print(e)
+        except requests.exceptions.ConnectionError as e:
+            print(e)
+        except Exception as e:
+            print(e)
+            raise Exception('exception')
+
+def main():
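+    """Download a remote snapshot with aria2c, publish it to the origin and
+    download mirrors, then trigger rootstrap/TRBS/NuGet follow-up jobs."""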
+
+    print('-----[JOB STARTED: sync_repo ]-----')
+
+    content = trigger_info(os.getenv("TRIGGER_INFO"))
+    # get envs
+    dest_dir = content.get('dest_dir')
+    snapshot_url = content.get('snapshot_url')
+    snapshot_username = content.get('snapshot_username')
+    snapshot_password = content.get('snapshot_password')
+    snapshot_version = content.get('snapshot_version')
+    start_delay = float(content.get('sync_repo_start_delay'))
+    project = content.get('project')
+    # rsync download
+    origin_rsync_path = os.path.join(os.getenv('IMG_SYNC_DEST_BASE'), dest_dir, snapshot_version)
+    download_rsync_path = os.path.join(os.getenv('RSYNC_SNAPSHOT'), dest_dir, snapshot_version)
+
+    # get latest version
+    latest_snapshot_version = get_latest_snapshot_version(snapshot_url)
+    # wait until the repo is published
+    print('Wait {} min...............'.format(start_delay))
+    time.sleep(start_delay * 60)
+
+    print('-----[Start to sync ]-----')
+    retry_count = 3
+    backup_links = []
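+    # Crawl-and-download loop: each pass fetches only links not seen in earlier
+    # passes; retry up to 3 times (10 min apart) while images are still publishing.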
+    while True:
+        ignores = ['buildlogs/', 'source/', 'build.xml', '.*debugsource.*.rpm', 'https://www.tizen.org']
+#        ignores = ['buildlogs/', 'source/', 'build.xml', '.*debugsource.*.rpm']
+        aria_urls_output = 'aria2c_urls'
+        aria_urls_input = urlparse.urljoin(snapshot_url,
+                                           snapshot_version)
+
+        # get all file links from url
+        links = generate_links(aria_urls_input, ignores=ignores)
+        links = list(set(links) - set(backup_links))
+
+        print(links)
+        print('-----[sync {}]-----'.format(retry_count))
+        if not links:
+            if retry_count == 0:
+                print('retry_count is 0')
+                break
+            if not os.path.exists(os.path.join(os.getcwd(),'repo', snapshot_version,'images')):
+                time.sleep(10 * 60)
+                retry_count -= 1
+                continue
+            print 'no new links ------ break'
+            break
+
+        generate_aria2_input(aria_urls_output, links)
+        backup_links.extend(links)
+
+        print(links)
+        # make a working dir 'repo' and chdir
+        subprocess.call('mkdir -p {}'.format(os.path.join(os.getenv('PATH_REPO_BASE'), dest_dir, snapshot_version)), shell=True)
+        subprocess.call('mkdir -p repo', shell=True)
+        os.chdir('repo')
+
+        # run aria2c
+        aria2c_script = 'aria2c -i ../{0}'.format(aria_urls_output)
+        if snapshot_username != '' and snapshot_username is not None:
+            aria2c_script += ' --http-user={0}'.format(snapshot_username)
+        if snapshot_password != '' and snapshot_password is not None:
+            aria2c_script += ' --http-passwd={0}'.format(snapshot_password)
+
+        subprocess.call(aria2c_script, shell=True)
+
+        if not os.path.exists(os.path.join(os.getcwd(),snapshot_version,'builddata')):
+            os.makedirs(os.path.join(os.getcwd(),snapshot_version,'builddata'))
+
+        # make latest link
+        if x_le_y(latest_snapshot_version, snapshot_version) and not os.path.exists(os.path.abspath('latest')):
+            subprocess.call('ln -sf {0} {1}'.format(snapshot_version, 'latest'),
+                            shell=True)
+
+        # change working dir
+        os.chdir(os.pardir)
+
+    # rsync download
+    origin_rsync_path = os.path.join(os.getenv('IMG_SYNC_DEST_BASE'), dest_dir)
+    download_rsync_path = os.path.join(os.getenv('RSYNC_SNAPSHOT'), dest_dir)
+
+    src_dir = os.path.join(os.getenv('WORKSPACE'), 'repo')
+    print('origin_rsync_path=%s' % origin_rsync_path)
+    print('download_rsync_path=%s' % download_rsync_path)
+    print('src_dir=%s' % src_dir)
+
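+    # Two rsync passes per destination: the first skips the 'latest' symlink,
+    # the second also refreshes 'latest' once the payload is in place.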
+    cmd = 'ls %s | grep -v latest | parallel --will-cite -j 8 rsync --bwlimit=5120000 --compress --stats --archive --recursive --hard-links --verbose %s/{} %s/' \
+            % (src_dir, src_dir, origin_rsync_path)
+    print(cmd)
+    subprocess.call(cmd, shell=True)
+
+    cmd = 'ls %s | parallel --will-cite -j 8 rsync --bwlimit=5120000 --compress --stats --archive --recursive --hard-links --verbose %s/{} %s/' \
+            % (src_dir, src_dir, origin_rsync_path)
+    print(cmd)
+    subprocess.call(cmd, shell=True)
+
+    cmd = 'ls %s | grep -v latest | parallel --will-cite -j 8 rsync --bwlimit=5120000 --compress --stats --archive --recursive --hard-links --verbose %s/{} %s/' \
+            % (src_dir, src_dir, download_rsync_path)
+    print(cmd)
+    subprocess.call(cmd, shell=True)
+
+    cmd = 'ls %s | parallel --will-cite -j 8 rsync --bwlimit=5120000 --compress --stats --archive --recursive --hard-links --verbose %s/{} %s/' \
+            % (src_dir, src_dir, download_rsync_path)
+    print(cmd)
+    subprocess.call(cmd, shell=True)
+
+
+    rootstrap_data = {"build_id": snapshot_version,
+                      "project": project,
+                      }
+    trigger_next("create-sdkrootstrap", rootstrap_data)
+
+    trbs_enabled = os.getenv("TRBS_ENABLED", "0") != "0"
+    if trbs_enabled:
+        # trigger sync obs job
+        syncobs_data = {
+                        "project": project,
+                        "snapshotdir": os.path.join(dest_dir,snapshot_version),
+                        "build_id": snapshot_version
+                       }
+        remote_jenkins_build_job(os.getenv('REMOTE_TRBS_JENKINS_URL'), \
+                                 os.getenv('REMOTE_TRBS_JENKINS_USER'), \
+                                 os.getenv('REMOTE_TRBS_JENKINS_PW'), \
+                                 os.getenv('REMOTE_TRBS_JOB_NAME'), \
+                                 os.getenv('REMOTE_TRBS_JOB_TOKEN'), \
+                                 'TRIGGER_INFO=%s\n' %(base64.b64encode(json.dumps(syncobs_data))))
+    # Update NuGet packages to the server
+    if os.getenv("NUGET_UPDATE_ENABLE","0") == "1":
+        repo_path = os.path.join(src_dir, snapshot_version)
+        repo_dirs = {}
+        arch_dirs = {}
+        for repo_dir in os.listdir(os.path.join( repo_path, "repos")):
+            arch_dirs = {}
+            arch_dirs["archs"] = [ arch_dir for arch_dir in \
+                                   os.listdir(os.path.join(repo_path, "repos", repo_dir, "packages")) ]
+            repo_dirs[repo_dir] = arch_dirs
+
+        nuget_data = {"build_id": snapshot_version,
+                      "project": project,
+                      "repo": repo_dirs,
+                      "repo_path": os.path.join( dest_dir, snapshot_version ),
+                     }
+        trigger_next("update-nuget", nuget_data)
+    ####
+
+if __name__ == '__main__':
+    try:
+        sys.exit(main())
+    except Exception as e:
+        print(e)
+        sys.exit(1)
+
diff --git a/job_trigger_for_sync_repo.py b/job_trigger_for_sync_repo.py
new file mode 100755 (executable)
index 0000000..2851b18
--- /dev/null
@@ -0,0 +1,277 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+import os
+import re
+import sys
+import requests
+import subprocess
+from argparse import ArgumentParser
+from common.buildtrigger import trigger_next
+
+
+class trigger_for_sync_repo(object):
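+    """Trigger a sync_repo job when a new public snapshot is published for a profile."""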
+
+    profiles = [
+                 {'name': 'unified',
+                 'project': 'Tizen:Unified',
+                 'dest_dir': 'public_mirror/tizen/unified',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/unified/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 50},
+                 {'name': 'mobile',
+                 'project': 'Tizen:Mobile',
+                 'dest_dir': 'public_mirror/tizen/mobile',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/mobile/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                 {'name': 'mobile_3.0',
+                 'project': 'Tizen:3.0:Mobile',
+                 'dest_dir': 'public_mirror/tizen/3.0-mobile',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/3.0-mobile/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'wearable',
+                 'project': 'Tizen:Wearable',
+                 'dest_dir': 'public_mirror/tizen/wearable',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/wearable/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'wearable_3.0',
+                 'project': 'Tizen:3.0:Wearable',
+                 'dest_dir': 'public_mirror/tizen/3.0-wearable',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/3.0-wearable/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'common',
+                 'project': 'Tizen:Common',
+                 'dest_dir': 'public_mirror/tizen/common',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/common/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'common_3.0',
+                 'project': 'Tizen:3.0:Common',
+                 'dest_dir': 'public_mirror/tizen/3.0-common',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/3.0-common/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'base',
+                 'project': 'Tizen:Base',
+                 'dest_dir': 'public_mirror/tizen/base',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/base/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'base_3.0',
+                 'project': 'Tizen:3.0:Base',
+                 'dest_dir': 'public_mirror/tizen/3.0-base',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/3.0-base/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'tv',
+                 'project': 'Tizen:TV',
+                 'dest_dir': 'public_mirror/tizen/tv',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/tv/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'tv_3.0',
+                 'project': 'Tizen:3.0:TV',
+                 'dest_dir': 'public_mirror/tizen/3.0-tv',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/3.0-tv/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'ivi',
+                 'project': 'Tizen:IVI',
+                 'dest_dir': 'public_mirror/tizen/ivi',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/ivi/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'ivi_3.0',
+                 'project': 'Tizen:3.0:IVI',
+                 'dest_dir': 'public_mirror/tizen/3.0-ivi',
+                 'snapshot_url': 'http://download.tizen.org/snapshots/tizen/3.0-ivi/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'daily-base',
+                 'project': 'daily:Tizen:Base',
+                 'dest_dir': 'public_mirror/release/daily/tizen/base',
+                 'snapshot_url': 'http://download.tizen.org/releases/daily/tizen/base/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'daily-base_3.0',
+                 'project': 'daily:Tizen:3.0:Base',
+                 'dest_dir': 'public_mirror/release/daily/tizen/3.0-base',
+                 'snapshot_url': 'http://download.tizen.org/releases/daily/tizen/3.0-base/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'daily-mobile',
+                 'project': 'daily:Tizen:Mobile',
+                 'dest_dir': 'public_mirror/release/daily/tizen/mobile',
+                 'snapshot_url': 'http://download.tizen.org/releases/daily/tizen/mobile/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'daily-wearable',
+                 'project': 'daily:Tizen:Wearable',
+                 'dest_dir': 'public_mirror/release/daily/tizen/wearable',
+                 'snapshot_url': 'http://download.tizen.org/releases/daily/tizen/wearable/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'daily-mobile_3.0',
+                 'project': 'daily:Tizen:3.0:Mobile',
+                 'dest_dir': 'public_mirror/release/daily/tizen/3.0-mobile',
+                 'snapshot_url': 'http://download.tizen.org/releases/daily/tizen/3.0-mobile/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'daily-wearable_3.0',
+                 'project': 'daily:Tizen:3.0:Wearable',
+                 'dest_dir': 'public_mirror/release/daily/tizen/3.0-wearable',
+                 'snapshot_url': 'http://download.tizen.org/releases/daily/tizen/3.0-wearable/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                {'name': 'daily-unified',
+                 'project': 'daily:Tizen:Unified',
+                 'dest_dir': 'public_mirror/release/daily/tizen/unified',
+                 'snapshot_url': 'http://download.tizen.org/releases/daily/tizen/unified/',
+                 'snapshot_username': '',
+                 'snapshot_password': '',
+                 'sync_repo_start_delay': 30},
+                ]
+
+    profile = None
+    mirror_root = os.getenv('PATH_REPO_BASE')
+    pattern = r'tizen[0-9a-zA-Z_\-\.]*[0-9]{8}.[0-9]{1,2}'
+
+    def parse_args(self):
+        parser = ArgumentParser('trigger_for_sync_repo')
+        parser.add_argument('profile',
+                            help='tizen profile name',
+                            type=str)
+        parser.add_argument('-u', '--username',
+                            help='snapshot server username',
+                            type=str)
+        parser.add_argument('-p', '--password',
+                            help='snapshot server password',
+                            type=str)
+        parser.add_argument('-d', '--sync_repo_start_delay',
+                            help='delay for repo_sync',
+                            type=str)
+        parser.add_argument('-s', '--snapshot_version',
+                            help='snapshot version',
+                            type=str)
+        return parser.parse_args()
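+    # Hypothetical Jenkins shell-step usage (profile name from the list above):
+    #   python job_trigger_for_sync_repo.py unified -s tizen-unified_20170601.1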
+
+    def setup_profile_from_args(self, args):
+        # set up profile
+        for l in self.profiles:
+            if args.profile.lower() == l['name']:
+                self.profile = l
+                break
+        else:
+            raise Exception('unknown profile: %s' % args.profile)
+
+        # update username
+        if args.username:
+            self.profile['snapshot_username'] = args.username
+
+        # update password
+        if args.password:
+            self.profile['snapshot_password'] = args.password
+
+        # update sync-start-delay
+        if args.sync_repo_start_delay:
+            self.profile['sync_repo_start_delay'] = args.sync_repo_start_delay
+
+        # update snapshot_version
+        if args.snapshot_version:
+            self.profile['snapshot_version'] = args.snapshot_version
+
+        return self.profile
+
+    def get_latest_snapshot_version(self, profile, timeout=5):
+        p = re.compile(self.pattern)
+
+        # get data from url
+        for loop in range(10):
+            try:
+                f = requests.get(profile['snapshot_url'],
+                                 auth=(profile['snapshot_username'],
+                                       profile['snapshot_password']),
+                                 timeout=timeout)
+                if f.status_code == 200:
+                    break
+            except requests.exceptions.Timeout as e:
+                print(e)
+                continue
+            except requests.exceptions.ConnectionError as e:
+                print(e)
+                continue
+            except Exception as e:
+                print(e)
+                raise Exception('exception from get_latest_snapshot_version')
+        else:
+            raise Exception('can\'t get latest snapshot version')
+
+        # return snapshot version
+        return p.findall(f.text).pop()
+
+    def run_sync_repo(self, profile):
+
+        force_trigger = False
+
+        # If snapshot_version is not given, sync the latest published snapshot;
+        # an explicitly requested version forces a re-trigger below.
+        if 'snapshot_version' not in profile:
+            profile['snapshot_version'] = self.get_latest_snapshot_version(profile)
+        else:
+            force_trigger = True
+
+        path = os.path.join(self.mirror_root,
+                            profile['dest_dir'],
+                            profile['snapshot_version'])
+        if not os.path.exists(path) or force_trigger:
+            print('-----[trigger sync_repo job for {} ]-----'.format(profile['snapshot_version']))
+            subprocess.call('mkdir -p {}'.format(path), shell=True)
+
+            trigger_next('sync_repo_{}'.format(profile['name']), profile)
+
+    def main(self):
+
+        print('-----[JOB STARTED: trigger_for_sync_repo ]-----')
+        args = self.parse_args()
+        profile = self.setup_profile_from_args(args)
+        self.run_sync_repo(profile)
+
+
+if __name__ == '__main__':
+    try:
+        trigger = trigger_for_sync_repo()
+        sys.exit(trigger.main())
+    except Exception as e:
+        print(e)
+        sys.exit(1)
diff --git a/job_trigger_obs_sync.py b/job_trigger_obs_sync.py
new file mode 100755 (executable)
index 0000000..d7f2220
--- /dev/null
@@ -0,0 +1,796 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+import os
+import re
+import sys
+import json
+import requests
+import urlparse
+from time import sleep
+from datetime import datetime
+
+import urllib2
+
+from osc import conf, core
+from common.buildservice import BuildService
+from common.buildtrigger import trigger_info, trigger_next, trigger_jenkins_build
+from gitbuildsys.errors import ObsError
+import xml.etree.ElementTree as ET
+import xml.etree.cElementTree as ElementTree
+from common.mapping import git_obs_map, get_ref_map, git_obs_map_full_list
+from common.gerrit import GerritEnv
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+class UploadError(Exception):
+    """Raised when uploading package sources to the target OBS fails."""
+    pass
+
+def get_manifest_filelists_snapshot(profile, request_url, timeout=5, group=None):
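+        """Parse the manifest XMLs under request_url into {git_path: revision};
+        with group='abs', return only the apps listed in _preloadapp.xml."""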
+        p = re.compile(r'alt=\"\[(TXT|DIR|   )]*\]\".*<a href=\"(.*)\">')
+
+        if not request_url:
+            return []
+        print request_url
+        # get data from url
+        for loop in range(10):
+            try:
+                f = requests.get(request_url,
+                                 auth=(profile['snapshot_username'],
+                                       profile['snapshot_password']),
+                                 timeout=timeout)
+                if f.status_code == 200:
+                    break
+            except requests.exceptions.Timeout as e:
+                print(e)
+                continue
+            except requests.exceptions.ConnectionError as e:
+                print(e)
+                continue
+            except Exception as e:
+                print(e)
+                raise Exception('exception from get manifest filelists')
+        else:
+            raise Exception('can\'t get manifest filelists')
+
+        # returns
+        results = {}
+        exclude_pkgs = []
+        found_links = p.findall(f.text)
+        for link in found_links:
+            manifest_url = os.path.join(request_url, link[1])
+            if link[0] == 'TXT':
+                f = requests.get(manifest_url,
+                                 auth=(profile['snapshot_username'],
+                                       profile['snapshot_password']),
+                                 timeout=timeout)
+                try:
+                    tree = ElementTree.fromstring(f.text)
+                except ElementTree.ParseError:
+                    raise
+                for result in tree.findall('project'):
+                    if '_preloadapp.xml' in link[1]:
+                        exclude_pkgs.append(''.join(result.get('path')))
+                    else:
+                        results[''.join(result.get('path'))] = result.get('revision')
+
+        if group == 'abs':
+            preloadapp_pkgs = {}
+            for app in exclude_pkgs:
+                preloadapp_pkgs[app] = results[app]
+            return preloadapp_pkgs
+
+        #print results
+        return results
+
+class trigger_for_sync_obs(object):
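+    """Sync a published snapshot into the local OBS as a <profile>:ref:<buildid> project."""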
+
+    profiles = [
+                {'name': 'base',
+                 'project': 'Tizen:Base',
+                 'baseproject': '',
+                 'dest_dir': 'public_mirror/tizen/base',
+                 'snapshot_url': '',
+                 'snapshot_username': '', 
+                 'snapshot_password': ''
+                },
+                {'name': '3.0-base',
+                 'project': 'Tizen:3.0:Base',
+                 'baseproject': '',
+                 'dest_dir': 'public_mirror/tizen/3.0-base',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': 'mobile',
+                 'project': 'Tizen:Mobile',
+                 'baseproject': 'Tizen:Base',
+                 'dest_dir': 'public_mirror/tizen/mobile',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': '3.0-mobile',
+                 'project': 'Tizen:3.0:Mobile',
+                 'baseproject': 'Tizen:3.0:Base',
+                 'dest_dir': 'public_mirror/tizen/3.0-mobile',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': 'common',
+                 'project': 'Tizen:Common',
+                 'baseproject': 'Tizen:Base',
+                 'dest_dir': 'public_mirror/tizen/common',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': '3.0-common',
+                 'project': 'Tizen:3.0:Common',
+                 'baseproject': 'Tizen:3.0:Base',
+                 'dest_dir': 'public_mirror/tizen/3.0-common',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': 'wearable',
+                 'project': 'Tizen:Wearable',
+                 'baseproject': 'Tizen:Base',
+                 'dest_dir': 'public_mirror/tizen/wearable',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': '3.0-wearable',
+                 'project': 'Tizen:3.0:Wearable',
+                 'baseproject': 'Tizen:3.0:Base',
+                 'dest_dir': 'public_mirror/tizen/3.0-wearable',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': 'tv',
+                 'project': 'Tizen:TV',
+                 'baseproject': 'Tizen:Base',
+                 'dest_dir': 'public_mirror/tizen/tv',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': '3.0-tv',
+                 'project': 'Tizen:3.0:TV',
+                 'baseproject': 'Tizen:3.0:Base',
+                 'dest_dir': 'public_mirror/tizen/3.0-tv',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': 'ivi',
+                 'project': 'Tizen:IVI',
+                 'baseproject': 'Tizen:Base',
+                 'dest_dir': 'public_mirror/tizen/ivi',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': '3.0-ivi',
+                 'project': 'Tizen:3.0:IVI',
+                 'baseproject': 'Tizen:3.0:Base',
+                 'dest_dir': 'public_mirror/tizen/3.0-ivi',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                {'name': 'unified',
+                 'project': 'Tizen:Unified',
+                 'baseproject': 'Tizen:Base',
+                 'dest_dir': 'public_mirror/tizen/unified',
+                 'snapshot_url': '',
+                 'snapshot_username': '',
+                 'snapshot_password': ''
+                },
+                ]
+
+    profile = None
+    pattern = r'tizen[0-9a-zA-Z_\-\.]*[0-9]{8}.[0-9]{1,2}'
+
+    def setup_profile(self, project):
+        # set up profile
+        for l in self.profiles:
+            if project == l['project']:
+                self.profile = l
+                snap = os.path.join(os.getenv("URL_PUBLIC_REPO_BASE"), self.profile['dest_dir'])
+                self.profile['snapshot_url'] = snap
+                print 'project = %s' % (project)
+                return self.profile
+        return False
+
+    def remove_duplicates(self, t, s):
+        """docstring for make_unique:"""
+        result=[]
+        result=list(set(t) - set(s))
+        return result
+
+    def get_snapshot_version(self, profile, timeout=5):
+        p = re.compile(self.pattern)
+        
+        if not profile['snapshot_url']:
+            return []
+
+        # get data from url
+        for loop in range(10):
+            try:
+                f = requests.get(profile['snapshot_url'],
+                                 auth=(profile['snapshot_username'],
+                                       profile['snapshot_password']),
+                                 timeout=timeout)
+                if f.status_code == 200:
+                    break
+            except requests.exceptions.Timeout as e:
+                print(e)
+                continue
+            except requests.exceptions.ConnectionError as e:
+                print(e)
+                continue
+            except Exception as e:
+                print(e)
+                raise Exception('exception from get_latest_snapshot_version')
+        else:
+            raise Exception('can\'t get latest snapshot version')
+
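+        # Each snapshot id appears to occur twice in the autoindex page (href
+        # and link text), so results[-1] is the newest build and results[-3]
+        # the previous one (assumption based on the Apache listing format).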
+        # return snapshot version
+        results = [str(s) for s in p.findall(f.text)]
+        #print results
+        if len(results) > 2:
+            return [results[-1], results[-3]]
+        else:
+            return [results[-1]]
+
+    def create_project(self, build, target, info, meta, config, baserefproject=None):
+        """
+        create project
+        """
+
+        try:
+            if not build.exists(target):
+                try:
+                    build.create_project(target, None, description=json.dumps(info))
+                except ObsError, error:
+                    raise LocalError("Unable to create project %s: %s" % (target, error))
+
+                # set meta
+                xml_meta = ElementTree.fromstringlist(meta)
+                #change the target project name
+                xml_meta.set('name',target)
+                #delete remote person
+                for person_element in xml_meta.findall('person'):
+                    xml_meta.remove(person_element)
+
+                # replace
+                if baserefproject:
+                    for repo_element in xml_meta.findall('repository'):
+                        for element in repo_element.findall('path'):
+                            element.set('project',baserefproject)
+
+                #print ElementTree.tostring(xml_meta)
+                #add target person
+                element = ElementTree.Element('person', {"userid": "%s" % ('Admin'),"role": "maintainer"})
+                xml_meta.append(element)
+                print ElementTree.tostring(xml_meta)
+                build.set_meta(ElementTree.tostring(xml_meta), target)
+                # set project config
+                #print config
+                build.set_project_config(target, config)
+
+                #disable publish flag
+                build.disable_build_flag(target, repo = None, flag="publish", status="disable")
+                #disable build flag
+                build.disable_build_flag(target, repo = None, flag="build", status="disable")
+                print "\nTarget project %s created" %(target)
+                return True
+            else:
+                print "\nTarget project %s exist" %(target)
+                return False
+        except ObsError, error:
+            raise LocalError("Unable to create project %s: %s" % (target, error))
+
+    def create_copy_pac(self, remote_build, build, profile, target, pkgname):
+        """                         """
+        if not build.exists(target, pkgname):
+            try:
+                build.create_package(target, pkgname)
+            except ObsError, error:
+                raise UploadError("Unable to create package %s/%s :%s" % \
+                        (target, pkgname, error))
+
+        service_file_name = "_service"
+        remote_build.get_source_file(profile, pkgname, service_file_name)
+        content = ""
+        with open(service_file_name, 'r') as f:
+            content = f.read()
+        content = content.replace("%s:%s" % (os.getenv("GERRIT_FETCH_URL"),os.getenv("GERRIT_SSHPORT")),
+                        "file://%s" %(os.getenv("TRBS_GIT_CACHE_DIR")))
+        print content
+        with open(service_file_name, 'w') as f:
+            f.write(content)
+        commit_msg="uploaded to copy pac %s/%s from %s" % (target, pkgname, profile)
+        try:
+            build.commit_files(target, pkgname,
+                   [(service_file_name, True)], commit_msg)
+        except ObsError, error:
+            raise UploadError("Unable to upload _service to %s: %s" % \
+                (target, error))
+        print "Copypac done."
+
+    def list_packages_from_remote(self, remote_build, build, profile, target, packages=None, existing=False):
+        """                         
+            "<services><service name='gbs'>" \
+            "<param name='revision'>%s</param>" \
+            "<param name='url'>%s</param>" \
+            "<param name='git-meta'>_git-meta</param>" \
+            "<param name='error-pkg'>2</param>" \
+            "</service></services>"
+        """
+        ret_dict = {}
+
+        sourceinfo = remote_build.get_sourceinfo_list(profile)
+        upload_packages=""
+        if packages:
+            upload_packages = packages
+        else:
+            upload_packages = [ package for package in sourceinfo if not re.search("_aggregate", package) ]
+
+        for package in upload_packages:
+            if sourceinfo[package]:
+                print "    [sourceinfo(%s)] %s" % (package, sourceinfo[package])
+                link_prj, link_pkg = sourceinfo[package][-1].split('/')
+                if link_prj and link_pkg :
+                    continue
+            elif re.search("_aggregate", package):
+                print "    [_aggregate] %s/%s" % (profile, package)
+                aggregate_file_name="_aggregate"
+                remote_build.get_source_file(profile, package, aggregate_file_name)
+                content = ""
+                with open(aggregate_file_name, 'r') as f:
+                    content = f.read()
+
+                if not re.search("qemu_aggregate", package):
+                    content_xml_root = ElementTree.fromstringlist(content)
+                    for element in content_xml_root.findall('aggregate'):
+                        element.set('project',target)
+                    content = ElementTree.tostring(content_xml_root)
+
+                print "      [_aggregate] O %s" % (package)
+
+                meta = remote_build.get_meta(profile, package)
+                xml_meta = ElementTree.fromstringlist(meta)
+                xml_meta.set('project',target)
+                meta = ElementTree.tostring(xml_meta)
+                print "      [_aggregate] META %s: %s" % (package, meta)
+
+                ret_dict[package] = {'type': aggregate_file_name, \
+                                     'content': content, \
+                                     'meta': meta}
+            else:
+                service_file_name = "_service"
+                remote_build.get_source_file(profile, package, service_file_name)
+                content = ""
+                with open(service_file_name, 'r') as f:
+                    content = f.read()
+
+                if content:
+                    #### replace ssh://review.tizen.org:29418
+                    content = content.replace("%s:%s" % (os.getenv("GERRIT_FETCH_URL"),os.getenv("GERRIT_SSHPORT")),
+                                              "file://%s" %(os.getenv("TRBS_GIT_CACHE_DIR")))
+                    #### replace ssh://review.tizen.org
+                    content = content.replace("%s" % (os.getenv("GERRIT_FETCH_URL")),
+                                              "file://%s" %(os.getenv("TRBS_GIT_CACHE_DIR")))
+
+                    _git = ''
+                    _rev = ''
+                    _root = ElementTree.fromstringlist(content)
+                    for elem in _root.findall('service'):
+                        for param in elem.findall('param'):
+                            if param.get('name') == 'url':
+                                _url = param.text
+                                if len(_url.split(os.getenv('TRBS_GIT_CACHE_DIR')+'/')) == 2:
+                                    _git = _url.split(os.getenv('TRBS_GIT_CACHE_DIR')+'/')[1]
+                                elif len(_url.split(os.getenv('GERRIT_FETCH_URL')+'/')) == 2:
+                                    _git = _url.split(os.getenv('GERRIT_FETCH_URL')+'/')[1]
+                            if param.get('name') == 'revision':
+                                _rev = param.text
+
+                    print "      [_service] O %s (%s,%s)" % (package, _git, _rev)
+
+                    ret_dict[package] = {'type': service_file_name, \
+                                         'content': content, \
+                                         'meta': None, \
+                                         'git': _git, \
+                                         'rev': _rev, \
+                                         'exist': existing}
+                else:
+                    print "      [_service] X %s" % (package)
+
+        return ret_dict
+
+    def _update_packages(self, remote_build, build, profile, target, upload_packages):
+        dbg_idx = 0
+        for package in upload_packages:
+            dbg_idx += 1
+            _update_type = upload_packages[package]['type']
+            raw = upload_packages[package]['content']
+            with open(_update_type, 'w') as f:
+                f.write(raw)
+            #if not build.exists(target, package):
+            if True:
+                try:
+                    build.create_package(target, package)
+                except ObsError, error:
+                    #raise UploadError('[%s] Unable to create package %s/%s :%s' \
+                    print '[%s] Unable to create package %s/%s :%s' \
+                                      % (_update_type, target, package, error)
+            commit_msg="uploaded to package %s/%s from %s" % (target, package, profile)
+            try:
+                build.commit_files(target, package,
+                       [(_update_type, True)], commit_msg)
+            except ObsError, error:
+                raise UploadError("Unable to upload %s to %s: %s" % \
+                    (_update_type, target, error))
+
+            dbg_prn = '%s %s\n        ' % (_update_type, package)
+            if _update_type == '_aggregate':
+                build.set_meta(upload_packages[package]['meta'], target, package)
+                dbg_prn = '%s%s' % (dbg_prn, raw)
+            elif _update_type == '_service':
+                dbg_prn = '%s%s (%s)' % (dbg_prn, raw.split("\'revision\'>")[1].split('<')[0], \
+                                         raw.split("file://")[1].split('<')[0])
+
+            print '    [_update %d/%d] %s' % (dbg_idx, len(upload_packages), dbg_prn)
+
+    def update_packages(self, remote_build, build, profile, target, upload_packages):
+        print '  Total packages to check... %d' % len(upload_packages)
+        plain_packages = upload_packages.copy()
+
+        # Move time-consuming packages to front
+        power_packages = {}
+        for x in ['chromium-efl', 'linux-exynos', 'gdb', 'elementary', 'efl', 'emulator-kernel']:
+            if x in upload_packages:
+                power_packages[x] = upload_packages[x]
+                del plain_packages[x]
+        if power_packages:
+            self._update_packages(remote_build, build, profile, target, power_packages)
+        if plain_packages:
+            self._update_packages(remote_build, build, profile, target, plain_packages)
+
+    def create_related_packages(self, remote_build, build, profile, target):
+        """create the 'link' package that relate the original package
+           profile is the base project
+           target  is the target project
+        """
+        sourceinfo = remote_build.get_sourceinfo_list(profile)
+        for package in sourceinfo:
+            if sourceinfo[package]:
+                link_prj, link_pkg = sourceinfo[package][-1].split('/')
+                if link_prj and link_pkg:
+                    if build.exists(target, package):
+                        build.delete_package(target, package)
+                    build.create_link_pac(target, link_pkg, \
+                                           target, package)
+                    print '  [_link] %s/%s -> %s/%s' % (target, link_pkg, target, package)
+
+    def compare_with_manifest(self, todo_dict, manifest_packages):
+        #TODO: If we have changed git path???
+        #TODO: If manifest items are not proceeded???
+        for item in todo_dict:
+            if 'git' in todo_dict[item] and todo_dict[item]['git'] \
+                and todo_dict[item]['git'] in manifest_packages:
+                rev_my = todo_dict[item]['rev']
+                rev_snapshot = manifest_packages[todo_dict[item]['git']]
+                if rev_my != rev_snapshot:
+                    print '  >> DIFFER (%s) -> (%s) %s' % (rev_my, rev_snapshot, item)
+                    todo_dict[item]['rev'] = rev_snapshot
+                    todo_dict[item]['content'] = todo_dict[item]['content'].replace(rev_my, rev_snapshot)
+                    todo_dict[item]['exist'] = False
+
+        # Remove packages that are already exists which do not need to update
+        for k, v in todo_dict.items():
+            if 'exist' in v and v['exist'] == True:
+                del todo_dict[k]
+        return todo_dict
+
+    def run_profile_update(self, profile, target):
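+        """Re-point the profile project's repositories to the new :ref: target
+        project via an OBS project link."""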
+
+        obs_api = os.getenv("OBS_API_URL")
+        obs_user = os.getenv("OBS_API_USERNAME")
+        obs_passwd = os.getenv("OBS_API_PASSWD")
+
+        build = BuildService(obs_api, obs_user, obs_passwd)
+
+        this_project = profile
+
+        if build.exists(this_project):
+            try:
+                build.unlink_project(this_project)
+            except Exception, err:
+                print 'Your project %s is broken: %s. Re-create it!' % (this_project, err)
+                build.cleanup(this_project, "Create new.")
+                sleep(10) # wait 10 seconds before re-creating
+                build.create_project(this_project, target)
+        else:
+            build.create_project(this_project, target)
+
+        build.disable_build_flag(this_project, repo = None, flag='build', status='disable')
+        build.disable_build_flag(this_project, repo = None, flag='publish', status='disable')
+        #build.link_project(this_project, src=target, linktype=None)
+
+        target_meta = build.get_meta(target)
+        print 'OK we got target_meta...\n%s' % target_meta
+        target_xml_meta = ElementTree.fromstringlist(target_meta)
+        target_xml_meta.attrib['name'] = this_project
+        target_xml_meta.find('title').text = 'Reference from %s' % target
+        for repo_element in target_xml_meta.findall('repository'):
+            for element in repo_element.findall('path'):
+                repo_element.remove(element)
+            #element.set('project', target)
+            #element.set('repository', repo_element.get('name'))
+            repo_element.insert(-1, ElementTree.Element('path', project=target, repository=repo_element.get('name')))
+
+        print 'set meta started...\n%s' % ElementTree.tostring(target_xml_meta)
+        build.set_meta(ElementTree.tostring(target_xml_meta), this_project)
+
+        build.link_project(this_project, src=target, linktype=None)
+
+        print 'Profile %s updated to %s' % (profile, target)
+
+    def run_sync_obs(self):
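+        """Create or refresh <profile>:ref:<buildid> from a published snapshot,
+        uploading only packages whose git revisions differ from the manifest."""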
+
+        force_trigger = False
+
+        print '---[JOB STARTED]-------------------------'
+
+        obs_api = os.getenv("OBS_API_URL")
+        obs_user = os.getenv("OBS_API_USERNAME")
+        obs_passwd = os.getenv("OBS_API_PASSWD")
+
+        remote_obs_api = os.getenv("PUBLIC_OBS_API_URL")
+        remote_obs_user = os.getenv("PUBLIC_OBS_API_USERNAME")
+        remote_obs_passwd = os.getenv("PUBLIC_OBS_API_PASSWD")
+
+        remote_meta = ''
+        config = ''
+        remote_package_list = ''
+
+        todo_dict = {}
+
+        # precheck profile
+        fields = trigger_info(os.getenv('TRIGGER_INFO'))
+
+        # Check if we've got required field in TRIGGER_INFO
+        for field in ('snapshotdir', 'build_id', 'project'):
+            if field not in fields:
+                print 'Error: TRIGGER_INFO doesn\'t contain %s' % field
+                return -1
+
+        self.profile = self.setup_profile(fields['project'])
+        if not self.profile:
+            print 'Skip Sync OBS project %s' % fields['project']
+            return 0
+
+        print '\nJOB Started at %s' % (str(datetime.now()))
+
+        src = profile = self.profile['project']
+        baseproject = self.profile['baseproject']
+        build_id = fields['build_id']
+        buildid = fields['build_id'].split('_')[1]
+        snapshotdir = fields['snapshotdir']
+        #print "buildid %s" %(buildid)
+
+        refname = "%s:ref:" % (profile)
+        target = "%s%s" %(refname,buildid)
+        if os.getenv('TRIGGER_MISC') == 'importrpmdone':
+            return self.run_profile_update(self.profile['project'], target)
+
+        #### remote buildservice ####
+        remote_build = BuildService(remote_obs_api, remote_obs_user, remote_obs_passwd)
+        #### target buildservice ####
+        build = BuildService(obs_api, obs_user, obs_passwd, \
+                             remote_obs_api, remote_obs_user, remote_obs_passwd)
+        #get project list
+        projectlist = [ prj for prj in build.get_package_list("") if prj.split(':')[0] == "Tizen" ]
+        #print "\n********\n  Project List=%s" % projectlist
+
+        refprojectlist = [ obj for obj in projectlist if refname in obj ]
+        #exclude target project in refprojectlist
+        if target in refprojectlist:
+            refprojectlist.remove(target)
+        print "\n  Ref Project List=%s" % refprojectlist
+
+        # get base ref project
+        # get list of ref projects from git-ref-mapping
+        gerrit_env = GerritEnv("PUBLIC_")
+        obs_ref_prjs = get_ref_map(baseproject, None, \
+                                   gitcache=gerrit_env.gitcache, \
+                                   gerrit_hostname=gerrit_env.hostname, \
+                                   gerrit_username=gerrit_env.username, \
+                                   gerrit_sshport=gerrit_env.sshport
+                                   )
+        #TODO:HYOKEUN
+        print 'baseproject:%s' % baseproject
+        print 'obs_ref_prjs:%s' % obs_ref_prjs
+        base_mapping_list = [ x['OBS_package'] for x in obs_ref_prjs if x['OBS_project'] == baseproject ]
+        #TODO:HYOKEUN
+        print 'base_mapping_list:%s' % base_mapping_list
+        baserefproject = ''
+        if base_mapping_list:
+            baserefproject = base_mapping_list[0]
+            print "\n  Ref Base Project from mapping=[%s]" % baserefproject
+        else:
+            if baseproject:
+                basereflists = [ obj for obj in projectlist
+                                 if "%s:ref:" % (baseproject) in obj ]
+                if basereflists:
+                    baserefproject = list(basereflists)[-1]
+                    print "\n  Ref Base Project from latest=[%s]" % baserefproject
+        print "\n********"
+
+        if not refprojectlist: # CopyLocalProject = False
+            src = profile
+            build_src = remote_build
+        else:                  # CopyLocalProject = True
+            src = list(refprojectlist)[-1]
+            build_src = build
+        print "src = %s , target = %s" %(src, target)
+
+        print "  0) Get meta, config, package list from remote [ %s ]" % (profile)
+        remote_meta = remote_build.get_meta(profile)
+        config = remote_build.get_project_config(profile)
+        remote_package_list = remote_build.get_package_list(profile)
+        print '\nGet Package List from Remote Done at %s' % (str(datetime.now()))
+
+        print "\n    0-1) copy package list (%d):\n %s" %(len(remote_package_list), remote_package_list)
+        if not build.exists(target):
+            print "\n    0-2) create project: %s" %(target)
+            self.create_project(build, target, None, remote_meta, config, baserefproject)
+            todo_dict = self.list_packages_from_remote(build_src, build, src, target)
+        else:
+            print "\n    0-2) project already exists: %s" %(target)
+            todo_dict = self.list_packages_from_remote(build_src, build, target, target, existing=True)
+
+        print '\nListing from Remote Done at %s' % (str(datetime.now()))
+        print 'todo_dict(%d):' % (len(todo_dict))
+
+        if True:
+            package_list = [ x for x in todo_dict ]
+            print "\n********"
+            print "  1) package list of target project \n %s" %(package_list)
+
+            packages = self.remove_duplicates(package_list, remote_package_list)
+            print "\n********"
+            print "  2) remove package %s" %(packages)
+            for pkgname in packages:
+                del todo_dict[pkgname]
+                #build.delete_package(target, pkgname)
+
+            packages = self.remove_duplicates(remote_package_list, package_list)
+            print "\n********"
+            print "  3) add packages %s" %(packages)
+            if packages:
+                ret_dict_add = self.list_packages_from_remote(\
+                                        remote_build, build, profile, target, packages=packages)
+                if ret_dict_add:
+                    todo_dict.update(ret_dict_add)
+            print '\nAdd/Remove Done at %s' % (str(datetime.now()))
+
+            print "\n********"
+            print "  4) compare package project "
+            manifest_packages = get_manifest_filelists_snapshot(self.profile, \
+                                                 os.path.join(os.getenv("URL_PUBLIC_REPO_BASE"), \
+                                                 snapshotdir,\
+                                                 "builddata/manifest"))
+            todo_dict = self.compare_with_manifest(todo_dict, manifest_packages)
+            print '\nCompare With Manifest Done at %s' % (str(datetime.now()))
+
+            print '\n  4-1) Final packages to be updated %d' % len(todo_dict)
+            sys.stdout.flush()
+            self.update_packages(remote_build, build, profile, target, todo_dict)
+            print '\nUpdate Packages Done at %s' % (str(datetime.now()))
+
+        print "\n********"
+        print "  5) Precheck all error package list from project"
+
+        need_runservice = []
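+        # Poll OBS for up to 15 rounds: each round waits 10 seconds, lists
+        # source import errors, retriggers git sync for failed _service runs
+        # (with another 30 second wait), then re-runs the service for them.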
+        for _wait in range(0,15):
+            sys.stdout.flush()
+            sleep(10) # Wait 10 seconds...
+            viewinfofile = build.get_source_viewinfo(target, nofilename=0)
+            root = ElementTree.parse(viewinfofile).getroot()
+            errpackages = [ s.get('package') for s in root.iter('sourceinfo') if s.findall('error') ]
+            for x in root.iter('sourceinfo'):
+                for y in x.findall('filename'):
+                    if '_service:gbs:service-error.spec' in y.text:
+                        errpackages.append(x.get('package'))
+                        break
+                for y in x.findall('error'):
+                    if 'bad build configuration, no build type' in y.text:
+                        errpackages.remove(x.get('package'))
+            if errpackages:
+                print '    5-1) Under packages are still... (%d)\n    %s' % (len(errpackages), errpackages)
+                # Retrigger git sync
+                for item in errpackages:
+                    pkgview = ElementTree.fromstring(build.get_source_info(target, item))
+                    for sv in pkgview.findall('serviceinfo'):
+                        if sv.get('code') != 'failed': continue
+                        for er in sv.findall('error'):
+                            print '        %s %s with cause: (%s)' % (item, sv.get('code'), er.text)
+                            need_runservice.append(item)
+                            if item not in todo_dict or 'git' not in todo_dict[item]: continue
+                            queue_id = trigger_jenkins_build('TRBS_OBS_UPDATE_GIT', \
+                                                             {'GERRIT_PROJECT': todo_dict[item]['git']}, \
+                                                             cred={'url': os.getenv('REMOTE_TRBS_JENKINS_URL'), \
+                                                                   'username': os.getenv('REMOTE_TRBS_JENKINS_USER'), \
+                                                                   'password': os.getenv('REMOTE_TRBS_JENKINS_PW')}, \
+                                                             block=False)
+                            print '        QUEUED.. %s' % queue_id
+                sys.stdout.flush()
+                sleep(30) # Wait 30 seconds...
+                for item in need_runservice:
+                    print '        runservice for %s' % item
+                    build.runservice(target, item)
+                need_runservice = []
+            else:
+                print '    5-2) All packages imported.'
+                break
+
+        print "\n********"
+        print "  6) create related packages \n"
+        self.create_related_packages(remote_build, build, profile, target)
+
+        print "\n********"
+        print "  7) Sync Done..."
+
+        # importrpm trigger next
+        fields['profile'] = profile
+        fields['target'] = target
+        trigger_next("importrpm_obs", fields)
+
+        if errpackages:
+            return 1
+        return 0
+
+    def main(self):
+
+        print('-----[JOB STARTED: trigger_for_sync_obs ]-----')
+        for loop in range(1):
+            try:
+                return self.run_sync_obs()
+            except Exception, err:
+                print 'run_sync_obs operation failed: %s' % str(err)
+                raise LocalError("FAIL %s" % (err))
+
+        return True
+
+if __name__ == '__main__':
+
+    try:
+        trigger = trigger_for_sync_obs()
+        sys.exit(trigger.main())
+    except Exception as e:
+        print(e)
+        sys.exit(1)
+
diff --git a/job_update_public_git.py b/job_update_public_git.py
new file mode 100755 (executable)
index 0000000..b5a7359
--- /dev/null
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (C) 2010, 2011, 2012, 2013, 2014 Intel, Inc.
+#
+#    This program is free software; you can redistribute it and/or
+#    modify it under the terms of the GNU General Public License
+#    as published by the Free Software Foundation; version 2
+#    of the License.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+#
+"""This script will pull latest change to local when remote ref updated..
+"""
+
+import os
+import sys
+
+from common.gerrit import get_gerrit_event, GerritEnv
+from common.git import clone_gitproject
+
+def main():
+    """The main body"""
+
+    print '---[JOB STARTED]----------------------------------------'
+
+    events = get_gerrit_event('PUBLIC_')
+
+    gerrit_env = GerritEnv('PUBLIC_')
+
+    # if the project is the PUBLIC MAPPING_PRJ or REF_MAPPING_PRJ, clone the
+    # full repository; otherwise only the bare clone below is made.
+    if events['project'] in (os.getenv('MAPPING_PRJ'), os.getenv('REF_MAPPING_PRJ')):
+        clone_gitproject(events['project'], \
+                os.path.join(gerrit_env.gitcache, events['project']), \
+                gerrit_hostname=gerrit_env.hostname, \
+                gerrit_username=gerrit_env.username, \
+                gerrit_sshport=gerrit_env.sshport)
+
+    # clone gerrit project to local git cache dir
+    clone_gitproject(events['project'], '%s.git' % \
+            os.path.join(gerrit_env.gitcache, events['project']),
+            bare=True, \
+            gerrit_hostname=gerrit_env.hostname, \
+            gerrit_username=gerrit_env.username, \
+            gerrit_sshport=gerrit_env.sshport)
+
+if __name__ == '__main__':
+    sys.exit(main())
index 003c7f4..fe52c7e 100644 (file)
@@ -17,7 +17,10 @@ Source:         %{name}-%{version}.tar.gz
 Requires:       %{name}-common = %{version}-%{release}
 Requires:       %{name}-submitobs = %{version}-%{release}
 Requires:       %{name}-tzs = %{version}-%{release}
-Requires:       %{name}-init = %{version}-%{release}
+Requires:       %{name}-dependsgraph = %{version}-%{release}
+Requires:       %{name}-trbs = %{version}-%{release}
+Requires:       %{name}-abs = %{version}-%{release}
+Requires:       %{name}-groovyinit = %{version}-%{release}
 BuildArch:      noarch
 
 %define destdir /var/lib/jenkins/%{name}
@@ -54,13 +57,37 @@ Requires:   %{name}-common = %{version}-%{release}
 %description tzs
 Jenkins scripts for tzs customer appliance instance
 
-%package init
+%package groovyinit
 Summary:    Groovy init scripts for jenkins startup
 Group:      Development/Tools/Building
 
-%description init
+%description groovyinit
 Groovy init scripts for jenkins startup
 
+%package dependsgraph
+Summary:    Dependency graph generation tool
+Group:      Development/Tools/Building
+Requires:   %{name}-common = %{version}-%{release}
+
+%description dependsgraph
+Dependency graph generation tool
+
+%package trbs
+Summary:    Tizen Reference Build System scripts
+Group:      Development/Tools/Building
+Requires:   %{name}-common = %{version}-%{release}
+
+%description trbs
+Tizen Reference Build System scripts
+
+%package abs
+Summary:    App Build System scripts
+Group:      Development/Tools/Building
+Requires:   %{name}-common = %{version}-%{release}
+
+%description abs
+App Build System scripts
+
 %package submitobs
 Summary:    Temporary package to isolate job_submitobs
 Group:      Development/Tools/Building
@@ -76,7 +103,7 @@ Isolated job_submitobs to avoid package installation conflicts
 
 %install
 install -d %{buildroot}%{destdir}
-cp -r job_*.py dir-purge-tool.sh logs-collector.sh common obs_requests templates scripts vis dep_graph %{buildroot}%{destdir}/
+cp -r job_*.py dir-purge-tool.sh logs-collector.sh common obs_requests templates scripts vis dep_graph trbs abs %{buildroot}%{destdir}/
 install -d %{buildroot}%{destinitdir}
 cp -r groovy_init_scripts/* %{buildroot}%{destinitdir}/
 
@@ -109,6 +136,30 @@ fi
 %{destdir}/common/tempbuildpkg.py
 %{destdir}/dir-purge-tool.sh
 %{destdir}/logs-collector.sh
+%{destdir}/job_monitor_scm_meta_git.py
+%{destdir}/job_check_scm_meta_git.py
+%{destdir}/job_ref_snapshot_info_update.py
+%{destdir}/job_ref_create_prj_obs.py
+%{destdir}/job_ref_import_rpm_obs.py
+%{destdir}/job_ref_purge_prj_obs.py
+%{destdir}/job_ref_precheck_project_obs.py
+%{destdir}/job_test_trigger_info_update.py
+%{destdir}/job_update_scm_meta_git_for_dashboard.py
+%{destdir}/job_update_git_obs_mapping_for_dashboard.py
+%{destdir}/job_add_dotnet_launching_performance_test.py
+%{destdir}/job_update_git_branch_project_mapping_for_dashboard.py
+%{destdir}/job_litmus_jira_issue_receiver.py
+%{destdir}/job_litmus_tct_file_receiver.py
+%{destdir}/job_update_nuget.py
+%{destdir}/job_add_git_tag.py
+%{destdir}/job_create_sdkrootstrap.py
+%{destdir}/job_find_incorrect_filenames.py
+%{destdir}/job_importrpm_obs.py
+%{destdir}/job_rsync_download.py
+%{destdir}/job_sync_repo.py
+%{destdir}/job_trigger_for_sync_repo.py
+%{destdir}/job_trigger_obs_sync.py
+%{destdir}/job_update_public_git.py
 
 %files common
 %defattr(-,jenkins,jenkins)
@@ -149,31 +200,17 @@ fi
 %{destdir}/job_sync_obs.py
 %{destdir}/job_sync_snapdiff.py
 %{destdir}/job_buildmonitor.py
-%{destdir}/job_update_nuget.py
-%{destdir}/job_add_git_tag.py
 %{destdir}/job_repa.py
-%{destdir}/job_trbs_test_result_receiver.py
-%{destdir}/job_update_scm_meta_git_for_dashboard.py
-%{destdir}/job_update_git_obs_mapping_for_dashboard.py
-%{destdir}/job_add_dotnet_launching_performance_test.py
-%{destdir}/job_update_git_branch_project_mapping_for_dashboard.py
-%{destdir}/job_litmus_jira_issue_receiver.py
-%{destdir}/job_litmus_tct_file_receiver.py
 %dir %{destdir}/templates
 %{destdir}/templates/index.html
 %{destdir}/job_update_local_git.py
-%{destdir}/job_monitor_scm_meta_git.py
-%{destdir}/job_check_scm_meta_git.py
 %dir %{destdir}/scripts
 %{destdir}/scripts/check_section.sh
 %{destdir}/scripts/get_git_desc_info.sh
 %{destdir}/scripts/nuget.exe
-%{destdir}/job_ref_snapshot_info_update.py
-%{destdir}/job_ref_create_prj_obs.py
-%{destdir}/job_ref_import_rpm_obs.py
-%{destdir}/job_ref_purge_prj_obs.py
-%{destdir}/job_ref_precheck_project_obs.py
-%{destdir}/job_test_trigger_info_update.py
+
+%files dependsgraph
+%defattr(-,jenkins,jenkins)
 %{destdir}/job_make_dep_graph.py
 %{destdir}/common/dep_graph.html.template
 %{destdir}/common/dep_graph.html.template_simple
@@ -223,12 +260,33 @@ fi
 %dir %{destdir}/vis/dist/img/timeline
 %{destdir}/vis/dist/img/timeline/delete.png
 
+%files trbs
+%defattr(-,jenkins,jenkins)
+%dir %{destdir}/common
+%{destdir}/common/trbs.py
+%{destdir}/job_trbs_test_result_receiver.py
+%dir %{destdir}/trbs
+%{destdir}/trbs/job_trbs_autoremove.py
+%{destdir}/trbs/job_trbs_obs.py
+%{destdir}/trbs/job_trbs_post_image.py
+%{destdir}/trbs/job_trbs_submit.py
+%{destdir}/trbs/job_trbs_test_automation.py
+%{destdir}/trbs/job_trbs_update_git.py
+%{destdir}/trbs/job_trbs_sync_aws.py
+
+%files abs
+%defattr(-,jenkins,jenkins)
+%dir %{destdir}/abs
+%{destdir}/abs/job_abs_batch_all.py
+%{destdir}/abs/job_abs_main.py
+%{destdir}/abs/job_abs_update_vm.py
+%{destdir}/abs/report_template
 
 %files tzs
 %defattr(-,jenkins,jenkins)
 %{destdir}/job_submit.py
 
-%files init
+%files groovyinit
 %defattr(-,jenkins,jenkins)
 %dir /var/lib/jenkins/
 %dir %{destinitdir}/
diff --git a/trbs/job_trbs_autoremove.py b/trbs/job_trbs_autoremove.py
new file mode 100755 (executable)
index 0000000..2884e7a
--- /dev/null
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+
+import os
+import sys
+from datetime import datetime
+
+sys.path.insert(1, os.path.join(sys.path[0], '..'))
+
+from common.buildservice import BuildService
+from common.gerrit import GerritEnv
+from common.mapping import get_ref_map
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+def secure_tag_time_format(tag_info):
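+    # Clamp out-of-range date/time fields so the strptime() call later on
+    # cannot fail, e.g. (illustrative) '20170532.246161' -> '20170531.235959'.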
+    _YYYYmmdd = tag_info.split('.')[0]
+    _HHMMSS = tag_info.split('.')[-1]
+
+    _YYYY = int(_YYYYmmdd) / 10000
+    _mm = int(_YYYYmmdd) / 100 % 100
+    _dd = int(_YYYYmmdd) % 100
+    _HH = int(_HHMMSS) / 10000
+    _MM = int(_HHMMSS) / 100 % 100
+    _SS = int(_HHMMSS) % 100
+
+    if _mm > 12: _mm = 12
+    if _dd > 31: _dd = 31
+    if _HH > 23: _HH = 23
+    if _MM > 59: _MM = 59
+    if _SS > 59: _SS = 59
+
+    return '%04d%02d%02d.%02d%02d%02d' % (_YYYY, _mm, _dd, _HH, _MM, _SS)
+
+def main():
+    """
+    Auto-remove expired trbs and ref OBS projects
+    """
+
+    obs_api = os.getenv("OBS_API_URL")
+    obs_user = os.getenv("OBS_API_USERNAME")
+    obs_passwd = os.getenv("OBS_API_PASSWD")
+
+    # prepare separate temp directory for each build
+    gerrit_env = GerritEnv("PUBLIC_")
+    git_cache = gerrit_env.gitcache
+
+    # get list of ref projects from git-ref-mapping
+    obs_ref_prjs = get_ref_map('/',None, \
+                               gitcache=gerrit_env.gitcache, \
+                               gerrit_hostname=gerrit_env.hostname, \
+                               gerrit_username=gerrit_env.username, \
+                               gerrit_sshport=gerrit_env.sshport
+                               )
+    #print 'obs_ref_prjs = %s' %obs_ref_prjs
+
+    build = BuildService(obs_api, obs_user, obs_passwd)
+
+    #### Remove trbs projects older than 7 days ####
+    remove_hour = (7 * 24)
+    prj_list = [ obj for obj in build.get_package_list("") if ":trbs:" in obj ]
+    print "trbs obs project = %s"  %(prj_list)
+    now_datetime = datetime.now()
+
+    for prj in prj_list:
+        #print prj, prj.split(':')[-1]
+        newtime = secure_tag_time_format(prj.split(':')[-1])
+        create_datetime = datetime.strptime(newtime, "%Y%m%d.%H%M%S")
+        #print create_datetime
+        post_hour = divmod((now_datetime - create_datetime).total_seconds(), 3600)[0]
+        if int(post_hour) > int(remove_hour):
+            build.cleanup(prj, "This project has expired")
+            print "%s project is remove." % prj
+
+    #### Remove ref OBS projects older than 21 days ####
+    remove_hour = (21 * 24)
+    prj_list = [ obj for obj in build.get_package_list("") \
+                 if "Tizen:" in obj \
+                 and ":ref:" in obj \
+                 and ":trbs:" not in obj \
+                 and ":Base:" not in obj ]
+    print "ref obs project = %s"  %(prj_list)
+    now_datetime = datetime.now()
+    for prj in prj_list:
+        #print prj, prj.split(':')[-1]
+        create_datetime = datetime.strptime(prj.split(':')[-1], "%Y%m%d.%H")
+        #print create_datetime
+        post_hour = divmod((now_datetime - create_datetime).total_seconds(), 3600)[0]
+        if int(post_hour) > int(remove_hour):
+            cleanup = True
+            for reftarget in obs_ref_prjs:
+                #print 'ref=%s prj=%s' %(reftarget['OBS_staging_project'],prj)
+                if reftarget['OBS_staging_project'] == prj:
+                    cleanup = False
+                    break
+            if cleanup:
+                build.cleanup(prj, "This project has expired")
+                print "%s project is remove." % prj
+            else:
+                print "%s project is reference project. Skip remove project !!!!" % prj
+
+    return
+
+if __name__ == '__main__':
+    try:
+        sys.exit(main())
+    except LocalError, err:
+        print err
+        sys.exit(1)
diff --git a/trbs/job_trbs_obs.py b/trbs/job_trbs_obs.py
new file mode 100755 (executable)
index 0000000..30facf7
--- /dev/null
@@ -0,0 +1,555 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+"""
+This code is called by jenkins jobs triggered by OBS events.
+"""
+
+import os
+import sys
+import re
+import shutil
+import base64
+import datetime
+sys.path.insert(1, os.path.join(sys.path[0], '..'))
+from common.repomaker import find_files, RepoMaker, RepoMakerError
+from common.buildtrigger import trigger_info, trigger_next
+from common.buildservice import BuildService
+from common.snapshot import Snapshot, SnapshotError
+from common.backenddb import BackendDB
+from common.trbs import get_info_from_trbs_name, get_trbs_project_name
+from common.iris_rest_client import IrisRestClient
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+def update_ks(imagedata, snapshot_id, pkg_urls, repo_name, base_project=None):
+    """
+    Update the repo URLs in the ks files to point at the right snapshot and
+    add the trbs repo URL with the highest priority.
+    Args:
+         imagedata (ImageData): imagedata.ksi maps {ks_file_name: ks_file_content}
+         snapshot_id (str): snapshot build id substituted for @BUILD_ID@
+         pkg_urls (dict): {arch: trbs package repo URL}
+         repo_name (str): name of the repository
+         base_project (str): base ref project used to rewrite base repo paths
+    """
+
+    # Figure out repo line
+    repo_lines = {}
+    for arch, url in pkg_urls.iteritems():
+        repo_lines[arch] = "repo --name=trbs --baseurl=%s/ --save "\
+                          "--ssl_verify=no --priority 1" % url
+
+    images_ks = {}
+    # update ULRs in ks file
+    for name, content in imagedata.ksi.items():
+        repo_line = repo_lines.get(imagedata.images[name]['arch'])
+        if not repo_line:
+            # skip architectures without repos
+            continue
+        new_ks_lines = []
+        for line in content.splitlines():
+            if line.startswith('repo ') and \
+                    'baseurl=' in line and \
+                    '@BUILD_ID@' in line:
+                match = re.match(r"repo --name=([\w\-\.]*) --baseurl=.*",
+                                 line)
+                if match:
+                    line = re.sub(r'@BUILD_ID@', snapshot_id, line)
+                    #TODO: Change snapshot server from /snapshots/ to /public_mirror/
+                    if '/snapshots/' in line:
+                        line = line.replace('/snapshots/', '/public_mirror/')
+            elif line.startswith('repo ') and \
+                    'baseurl=' in line:
+                match = re.match(r"repo --name=([\w\-\.]*) --baseurl=.*",
+                                 line)
+                if match:
+                    #TODO: Change snapshot server from /snapshots/ to /public_mirror/
+                    if '/snapshots/' in line:
+                        line = line.replace('/snapshots/', '/public_mirror/')
+                #BASE replace
+                if base_project and base_project != 'latest':
+                    match = re.match(r"repo --name=base([\w\-\.]*) --baseurl=.*/latest/repos/.*",
+                                     line)
+                    if match:
+                        base_path = [ x for x in base_project.lower().split(':') if x != 'ref' ]
+                        line = line.replace('/latest/', '/%s/' % ('-'.join(base_path[0:-1]) + '_' + base_path[-1]))
+                print 'line=%s' %(line)
+
+            new_ks_lines.append(line)
+
+        new_ks_lines.insert(new_ks_lines.index('%packages')-1, repo_line)
+        # Update the ks files in imagedata
+        new_ks_content = '\n'.join(new_ks_lines)
+        images_ks[name] = new_ks_content
+
+    return images_ks
+
+def trigger_image_creation(images_ks, build_id, path_repo,
+                           project, url_pub_base, repo_name, download_num=1):
+    """
+    trigger_image_creation:
+        Prepare the data and trigger the image_creation jobs
+    Args:
+         images_ks (dict): {ks_file_name: ks_file_content}
+         build_id (str): the trbs repo build_id
+    """
+    count = 0
+    for index, (ksname, kickstart) in enumerate(images_ks.items()):
+        count += 1
+        name = ksname.replace('.ks', '')
+        data = {'name': name,
+                'kickstart': kickstart,
+                'buildid': build_id,
+                'images_path': os.path.join("images", repo_name, name),
+                'project': project,
+                'repo_path': path_repo,
+                'repo': repo_name,
+                'url_pub_base': url_pub_base,
+                'download_num': download_num
+                }
+        # add image_building for iris
+        pub_enabled = os.getenv("IRIS_PUB_ENABLED", "0") != "0"
+        if pub_enabled:
+            rest = IrisRestClient(
+                os.getenv("IRIS_SERVER"),
+                os.getenv("IRIS_USERNAME"),
+                base64.b64decode(os.getenv('IRIS_PASSWORDX', '')))
+            rest.publish_event("image_building", {
+                "project": project,
+                "repo": repo_name,
+                "name": name,
+                })
+        trigger_next('%s/image_trigger_%s_%s' % (os.getenv('WORKSPACE'),
+                                                 repo_name, index), data)
+    return count
+
+def get_parent_project_name(target_project):
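+    # e.g. (illustrative) 'Tizen:Unified:ref:20170601.1' -> 'Tizen:Unified'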
+    return target_project.split(':ref')[0]
+
+def make_repo(project, repo, backenddb, base_url, base_path, live_repo_base, build, block=False):
+    """
+    make repo.
+
+    Args:
+        project (str): OBS trbs project name
+        repo (str): name of the OBS live repository
+        backenddb (BackendDB): backenddb instance
+        base_url (str): public download base URL
+        base_path (str): filesystem path of the download area
+        live_repo_base (str): base path of the OBS live repositories
+        build (BuildService): OBS build service instance
+        block (bool): when True only collect repo info and skip repo creation
+    Raises:
+        LocalError if can't create repos or can't find image configurations
+    """
+
+    print 'MAKE_REPO(project=%s, repo=%s, base_url=%s, base_path=%s, live_repo_base=%s)' \
+          % (project, repo, base_url, base_path, live_repo_base)
+    images_count = 0
+    create_repo = []
+    repos = {}
+    imagedatas = {}
+
+    # Make build id from latest snapshot + project suffix
+    target_project, basebuildid, tstamp = get_info_from_trbs_name(project)
+    try:
+        snapshot = Snapshot(backenddb, base_path, obs_project=target_project)
+    except SnapshotError, err:
+        raise LocalError("Error getting snapshot info: %s" % str(err))
+
+    try:
+        trbs = snapshot.get_trbs(base_url, tstamp, basebuildid)
+    except SnapshotError, err:
+        raise LocalError("Error getting trbs info: %s" % str(err))
+
+    print "basebuildid: %s" % basebuildid
+    print "trbs.build_id: %s" % trbs.build_id
+    print "trbs.path: %s" % trbs.path
+    print "trbs.dir: %s" % trbs.dir
+    print "trbs.snap_buildid: %s" % trbs.snap_buildid
+
+    # Convert live repo to download structure
+    live_repo_path = os.path.join(live_repo_base,
+                                  project.replace(':', ':/'))
+    repo_dir = os.path.join(trbs.path, "repos")
+
+    print "live_repo_path: %s" % live_repo_path
+    print "repo_dir: %s" % repo_dir
+
+    snapshot_path = os.path.join(snapshot.base_path, trbs.dir).replace('trbs', 'public_mirror')
+
+    print 'snapshot_path: %s' % (snapshot_path)
+
+    targets = snapshot.targets
+    images_count = 0
+
+    build_info = build.get_info(project)
+    if build_info.get('download_num'):
+        current_download_num = build_info.get('download_num') + 1
+    else:
+        current_download_num = int(1)
+    print 'Current download_num = %d' % current_download_num
+    base_project = build_info.get('base', 'latest')
+    print 'BASE PROJECT : %s' % (base_project)
+
+    # Cleanup repo directory
+    if os.path.isdir(os.path.join(trbs.path, trbs.build_id)):
+        print " repo path= %s exits. Cleanup repo directory" % os.path.join(trbs.path, trbs.build_id)
+        shutil.rmtree(os.path.join(trbs.path, trbs.build_id))
+
+    if buildmonitor_enabled:
+        global bm_snapshot_name
+        global bm_snapshot_url
+        bm_snapshot_name = trbs.build_id
+        bm_snapshot_url = os.path.join(base_url, trbs.dir, trbs.build_id)
+
+    for repo in targets:
+        if block:
+            repos.update({repo['Name']: {'archs': list(set(repo['Architectures']))}})
+            continue
+
+        repomaker = RepoMaker(trbs.build_id,
+                              os.path.join(trbs.path,
+                                           trbs.build_id))
+        try:
+            repomaker.add_repo(live_repo_path, repo['Name'], repo['Architectures'], move=False)
+        except RepoMakerError, err:
+            raise LocalError("Unable to create download repo: %s" % err)
+
+        # Assuming that there can be just one image-configurations- rpm in the repo
+        if not repomaker.has_images():
+            # repomaker did not find image-configurations in the pre-release
+            # repo; take it from the target repo (one package repo is enough)
+
+            # Add image configuration to trbs repo
+            img_conf = find_files(os.path.join(snapshot_path, 'repos', repo['Name']),
+                                  prefix="image-configurations-",
+                                  suffix='noarch.rpm')
+            img_conf_list = list(img_conf)
+            # check whether an image-configuration package exists
+            if not img_conf_list:
+                if buildmonitor_enabled:
+                    print '[%s][LocalError] bm_git_tag(%s)\n' % (__file__, bm_git_tag)
+                    #buildmonitor.update_fail_status_for_sr_stage(project, bm_git_tag)
+                    bm_stage = 'Pre_Snap_Fail'
+                    bm_data = {"bm_stage": bm_stage,
+                               "project" : project,
+                               "bm_git_tag" : bm_git_tag,
+                              }
+                    trigger_next("BUILD-MONITOR-4-%s" % bm_stage, bm_data)
+
+                #raise LocalError("Image configuration not found in %s" %
+                #        os.path.join(snapshot_path, 'repos', repo['Name']))
+                print "Image configuration not found in %s" \
+                      % os.path.join(snapshot_path, 'repos', repo['Name'])
+                continue
+
+            for rpm in img_conf_list:
+                repomaker.load_imagedata(rpm)
+
+        # check whether any ks file points to this repo
+        if not repomaker.has_images():
+            continue
+
+        # trigger post snapshot creation job with repo data
+        # buildlogs.job
+        imagedatas[repo['Name']] = repomaker.imagedata
+        repos.update(repomaker.repos)
+
+        trbs_pkg_urls = trbs.pkg_urls(repo['Name'])
+
+        print trbs_pkg_urls
+
+        # Update ks files
+        images_ks = update_ks(repomaker.imagedata, 
+                              trbs.snap_buildid,
+                              trbs.pkg_urls(repo['Name']),
+                              repo['Name'],
+                              base_project)
+
+        if buildmonitor_enabled:
+            bm_pkg_urls_dic = trbs.pkg_urls(repo['Name'])
+            print '[%s] trbs.pkg_urls(%s), base_path(%s)\n' \
+                  % (__file__, bm_pkg_urls_dic, base_path)
+            bm_repo = repo['Name']
+            bm_arch = repo['Architectures'][0]
+            bm_pkg_url = bm_pkg_urls_dic[bm_arch]
+            bm_pkg_dir = bm_pkg_url.replace(base_url, base_path)
+            #print '[%s] bm_arch(%s), bm_pkg_dir(%s), os.listdir(bm_pkg_dir)(%s)\n' \
+            #      % (__file__, bm_arch, bm_pkg_dir, os.listdir(bm_pkg_dir))
+
+            if bm_arch == 'ia32':
+                bm_arch = 'i686'
+
+            # get rpm files
+            bm_pkg_name_lst = []
+            bm_pkg_mdate_lst = []
+            bm_pkg_size_lst = []
+            bm_trg_count = 0
+            bm_pkg_count = 0
+            BM_PKG_LIMIT = 1100
+            for root, dirs, files in os.walk(bm_pkg_dir):
+                for each_file in files:
+                    if each_file.endswith(".rpm"):
+                        rpm_file_path = root + '/' + each_file
+                        rpm_file_mdate = os.path.getmtime(rpm_file_path)
+                        rpm_file_size = os.path.getsize(rpm_file_path)
+                        bm_pkg_name_lst.append(each_file)
+                        bm_pkg_mdate_lst.append(rpm_file_mdate)
+                        bm_pkg_size_lst.append(rpm_file_size)
+                        #print '[%s] rpm_file_path(%s), rpm_file_mdate(%s), rpm_file_size(%s)\n' \
+                        #      % (__file__, rpm_file_path, rpm_file_mdate, rpm_file_size)
+
+                        # divide the big pkgs
+                        bm_pkg_count += 1
+                        #print '[%s] bm_pkg_count(%s), BM_PKG_LIMIT(%s)\n' \
+                        #      % (__file__, bm_pkg_count, BM_PKG_LIMIT)
+                        if bm_pkg_count >= BM_PKG_LIMIT:
+                            # for trigger
+                            bm_stage = 'Pre_Snap_packages'
+                            bm_data = {"bm_stage" : bm_stage,
+                                       "project" : project,
+                                       "bm_repo" : bm_repo,
+                                       "bm_arch" : bm_arch,
+                                       "bm_pkg_url" : bm_pkg_url,
+                                       "bm_pkg_name_lst" : bm_pkg_name_lst,
+                                       "bm_pkg_mdate_lst" : bm_pkg_mdate_lst,
+                                       "bm_pkg_size_lst" : bm_pkg_size_lst,
+                                       "bm_trg_count" : bm_trg_count,
+                                       "bm_pkg_count" : bm_pkg_count,
+                                       "BM_PKG_LIMIT" : BM_PKG_LIMIT,
+                                      }
+                            trigger_next("BUILD-MONITOR-2-%s-%s-%s-trg_%s" % (bm_stage, bm_repo, bm_arch, bm_trg_count), bm_data)
+
+                            # clear the data
+                            bm_pkg_count = 0
+                            bm_trg_count += 1
+                            bm_pkg_name_lst = []
+                            bm_pkg_mdate_lst = []
+                            bm_pkg_size_lst = []
+                            #print '[%s] reach the BM_PKG_LIMIT!!(%s), bm_pkg_count(%s), bm_trg_count(%s)\n' \
+                            #      % (__file__, BM_PKG_LIMIT, bm_pkg_count, bm_trg_count)
+
+            # for rest pkgs
+            #buildmonitor.create_snapshot_packages_for_build_snapshot_package(project, bm_snapshot_name,
+            #                                                                 repo['Name'], repo['Architectures'][0],
+            #                                                                 bm_pkg_urls_dic, base_url, base_path)
+            bm_stage = 'Pre_Snap_packages'
+            bm_data = {"bm_stage" : bm_stage,
+                       "project" : project,
+                       "bm_repo" : bm_repo,
+                       "bm_arch" : bm_arch,
+                       "bm_pkg_url" : bm_pkg_url,
+                       "bm_pkg_name_lst" : bm_pkg_name_lst,
+                       "bm_pkg_mdate_lst" : bm_pkg_mdate_lst,
+                       "bm_pkg_size_lst" : bm_pkg_size_lst,
+                       "bm_pkg_size_lst" : bm_pkg_size_lst,
+                       "bm_trg_count" : bm_trg_count,
+                       "bm_pkg_count" : bm_pkg_count,
+                       "BM_PKG_LIMIT" : BM_PKG_LIMIT,
+                      }
+            #print '[%s] for rest pkgs!! BM_PKG_LIMIT(%s), bm_pkg_count(%s), bm_trg_count(%s)\n' \
+            #      % (__file__, BM_PKG_LIMIT, bm_pkg_count, bm_trg_count)
+            trigger_next("BUILD-MONITOR-2-%s-%s-%s-trg_%s" % (bm_stage, bm_repo, bm_arch, bm_trg_count), bm_data)
+
+        # Generate image info to builddata/ dir
+        repomaker.gen_image_info(images_ks)
+
+        # trigger image creation jobs
+        images_count += trigger_image_creation(images_ks, trbs.build_id,
+                               os.path.join(trbs.dir,trbs.build_id),
+                               project, base_url, repo['Name'], download_num=current_download_num)
+
+    build.update_info({'images_count': images_count,
+                       'images': [],
+                       'download_url':os.path.join(base_url, trbs.dir, trbs.build_id),
+                       'download_num': int(1)
+                      }, project)
+
+    #TODO: IMAGER want to read download_num to give up; rsync operation.
+    try:
+        with open(os.path.join(trbs.path, trbs.build_id, 'buildinfo.in'), 'w') as df:
+            df.write('download_num=%d\n' % current_download_num)
+    except Exception, err:
+        print 'not to update download_num note. %s' % str(err)
+
+    # trigger post snapshot creation job with repo data
+    # buildlogs.job
+    data = {'project': project,
+        'repo': repos,
+        'repo_path': os.path.join(trbs.dir, trbs.build_id),
+        'build_id': trbs.build_id
+    }
+    parm_backend = {}
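+    # Select a backend for the follow-up job when the project name matches
+    # that backend's BACKEND_xx_REGEX environment variable.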
+    for bknd in ['BACKEND_02', 'BACKEND_01']:
+        if os.getenv('%s_REGEX' % bknd) and \
+            re.search(r'%s' % os.getenv('%s_REGEX' % bknd), data['project']) is not None:
+            parm_backend['BACKEND_SELECTION'] = bknd
+            break
+    trigger_next("post-buildlogs", data, extra_params=parm_backend)
+
+    return 1
+
+def project_cleanup(backenddb, build, base_path, base_url, event_dict):
+    """ request(SR) end of life, this founction should be called to
+    delete the trbs project """
+
+    # Event comes from a project deletion
+    trbs_project_name = event_dict.get("project") or\
+        event_dict.get("sourceproject")
+
+    try:
+        target_project, basebuildid, time_stamp = \
+            get_info_from_trbs_name(trbs_project_name)
+    except ValueError:
+        print "Cannot get trbs project info from project name; " \
+            "taking no action on %s" % trbs_project_name
+        return
+
+    # Get trbs data from db
+    try:
+        snapshot = Snapshot(backenddb, base_path, obs_project=target_project)
+        trbs = snapshot.get_trbs(base_url, time_stamp, basebuildid)
+    except SnapshotError, err:
+        raise LocalError("Error getting trbs data: %s" % str(err))
+
+    if os.path.isdir(os.path.join(trbs.path, trbs.build_id)):
+        shutil.rmtree(os.path.join(trbs.path, trbs.build_id))
+        print 'Removing the snapshot project: %s' % os.path.join(trbs.path, trbs.build_id)
+        # TRIGGER NEXT SYNC-AWS
+        if os.getenv("TRBS_SYNC_AWS_ENABLED", "0") != "0":
+            data = {"remove_path": os.path.join(trbs.path, trbs.build_id)}
+            trigger_next('SYNC-AWS', data)
+    else:
+        print 'The snapshot project: %s is not present' % os.path.join(trbs.path, trbs.build_id)
+
+    return
+
+def check_build_fail(unresolvable_broken_failed_status):
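+    # Any non-aggregate package in unresolvable/broken/failed state marks the
+    # build as failed; '*_aggregate' pseudo-packages are ignored.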
+    for repo in unresolvable_broken_failed_status:
+        for arch in unresolvable_broken_failed_status[repo]:
+            for p in unresolvable_broken_failed_status[repo][arch]:
+                if p[-10:] != "_aggregate":
+                    return True
+    return False
+
+def get_unresolvable_broken_packages(unresolvable_broken_failed_status):
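+    # Filter the status tree down to packages that are unresolvable or
+    # broken; plain 'failed' results are dropped here.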
+    unresolvable_broken_packages = {}
+
+    for repo in unresolvable_broken_failed_status:
+        unresolvable_broken_packages[repo] = {}
+        for arch in unresolvable_broken_failed_status[repo]:
+            unresolvable_broken_packages[repo][arch] = {}
+            for p in unresolvable_broken_failed_status[repo][arch]:
+                if unresolvable_broken_failed_status[repo][arch][p] in ("unresolvable", "broken"):
+                    unresolvable_broken_packages[repo][arch][p] = unresolvable_broken_failed_status[repo][arch][p]
+
+    return unresolvable_broken_packages
+
+def main(action):
+    """Script entry point.
+       Parameters:
+          action - cleanup or create_images
+    """
+
+    print '---[JOB STARTED: %s ]-------------------------' % action
+    global buildmonitor_enabled
+    buildmonitor_enabled = os.getenv("BUILDMONITOR_ENABLED", "0") != "0"
+    print 'buildmonitor_enabled(%s)\n' % (buildmonitor_enabled)
+    if buildmonitor_enabled:
+        bm_start_datetime = datetime.datetime.now()
+        global bm_git_tag # for execption handling
+        bm_git_tag = None
+
+    obs_api = os.getenv("OBS_API_URL")
+    obs_user = os.getenv("OBS_API_USERNAME")
+    obs_passwd = os.getenv("OBS_API_PASSWD")
+    base_url = os.getenv("URL_PUBLIC_REPO_BASE")
+    base_path = os.getenv('PATH_REPO_BASE')
+    live_repo_base = os.getenv('PATH_LIVE_REPO_BASE')
+
+    content = trigger_info(os.getenv("TRIGGER_INFO"))
+
+    project = content.get("project") or content.get("sourceproject")
+
+    build = BuildService(obs_api, obs_user, obs_passwd)
+
+    # Init backend database
+    backenddb = BackendDB(os.getenv("REDIS_HOST"), int(os.getenv("REDIS_PORT")))
+
+    if action == 'create_images':
+        repo = content.get("repo")
+        info = build.get_info(project)
+
+        buildstatus = build.getbuildstatus(project)
+        print 'buildstatus=%s' %(buildstatus)
+        build.update_buildstatus(buildstatus,project)
+        global bBuildFail
+
+        unresolvable_broken_failed_status = build.get_package_build_result(project, ("unresolvable", "broken", "failed"))
+        bBuildFail = check_build_fail(unresolvable_broken_failed_status)
+
+        if buildmonitor_enabled:
+            bm_git_tag = info['git_tag']
+            #buildmonitor.start_pre_create_snapshot_for_sr_stage(project, bm_git_tag, bm_start_datetime)
+            #buildmonitor.start_pre_create_snapshot_for_build_snapshot(project, bm_start_datetime)
+            bm_stage = 'Pre_Snap_Start'
+            bm_data = {"bm_stage" : bm_stage,
+                       "project" : project,
+                       "bm_git_tag" : bm_git_tag,
+                       "bm_start_datetime": str(bm_start_datetime),
+                       "bBuildFail": bBuildFail,
+                       "unresolvable_broken_packages": get_unresolvable_broken_packages(unresolvable_broken_failed_status)
+                      }
+            trigger_next("BUILD-MONITOR-1-%s" % bm_stage, bm_data)
+
+        if not make_repo(project, repo, backenddb, base_url, base_path, live_repo_base, build, block=bBuildFail):
+            return 0
+    elif action == 'cleanup':
+        # When a request (SR) reaches end of life, delete the trbs project.
+        project_cleanup(backenddb, build, base_path, base_url, content)
+    else:
+        raise LocalError("Not supported method of pre_trbs_obs job: %s" \
+                          % action)
+
+    if buildmonitor_enabled and action == 'create_images' and not bBuildFail:
+        info = build.get_info(project)
+        bm_snapshot_num = info['download_num']
+        print '[%s] bm_snapshot_num(%s)\n' \
+              % (__file__, bm_snapshot_num)
+        bm_end_datetime = datetime.datetime.now()
+        #print '[%s] project(%s), bm_git_tag(%s), start_time(%s), end_time(%s)\n' \
+        #      % (__file__, project, bm_git_tag, bm_start_datetime, bm_end_datetime)
+        # for sr_stage & build_snapshot
+        #buildmonitor.end_pre_create_snapshot_for_sr_stage(project, bm_git_tag,
+        #                                                  bm_start_datetime,
+        #                                                  bm_end_datetime)
+        #buildmonitor.end_pre_create_snapshot_for_build_snapshot(project,
+        #                                                        bm_snapshot_name,
+        #                                                        bm_snapshot_url,
+        #                                                        bm_end_datetime)
+        bm_stage = 'Pre_Snap_End'
+        bm_data = {"bm_stage": bm_stage,
+                   "project" : project,
+                   "bm_git_tag": bm_git_tag,
+                   "bm_start_datetime": str(bm_start_datetime),
+                   "bm_end_datetime": str(bm_end_datetime),
+                   "bm_snapshot_name" : bm_snapshot_name,
+                   "bm_snapshot_url" : bm_snapshot_url,
+                   "bm_snapshot_num" : bm_snapshot_num,
+                    }
+        trigger_next("BUILD-MONITOR-3-%s" % bm_stage, bm_data)
+
+if __name__ == '__main__':
+    try:
+        sys.exit(main(sys.argv[1]))
+    except LocalError, error:
+        print error
+        sys.exit(1)
diff --git a/trbs/job_trbs_post_image.py b/trbs/job_trbs_post_image.py
new file mode 100755 (executable)
index 0000000..3b94eb7
--- /dev/null
@@ -0,0 +1,205 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2010, 2011, 2012, 2013, 2014 Intel, Inc.
+#
+#    This program is free software; you can redistribute it and/or
+#    modify it under the terms of the GNU General Public License
+#    as published by the Free Software Foundation; version 2
+#    of the License.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+#
+"""This script is used to update infomation about images"""
+
+import os
+import sys
+import base64
+import datetime
+sys.path.insert(1, os.path.join(sys.path[0], '..'))
+
+from common.buildtrigger import trigger_info, trigger_next
+from common.buildservice import BuildService
+from common.trbs import is_trbs_project
+from common.iris_rest_client import IrisRestClient
+from common.send_mail import prepare_mail
+from common.gerrit import Gerrit, GerritError
+
+def send_mail_buildstatus_message(info, project):
+    """ send mail about buildstatus """
+    EMAIL_TITLE = "[%s][%s][#%s] (Build:%s Images%s)"
+    EMAIL_BODY = "OBS Build and Snapshot and Image creator status \n" \
+                 "Please check the following details.\n\n" \
+                 "Submitter : %s\n\n" \
+                 "Reference project : %s\n" \
+                 "Git tag : %s\n" \
+                 "Git Repository : %s\n" \
+                 "Build status : %s\n" \
+                 "Image status : %s\n" \
+                 "Snapshot url: %s\n\n" \
+                 "For more details please check: %s"
+    EMAIL_FOOTER = '\n\n--------------------------------------------------------\n'\
+                   'Automatically generated by backend service.\n'\
+                   'Please DO NOT Reply!'
+
+    #build status
+    buildstatus=''
+    title_build = 'Succeeded'
+    for status in info.get("buildstatus"):
+        buildstatus += "\n Repository: %s arch: %s (%s:%s)" \
+                       % (status.get('repo'),status.get('arch'), \
+                          status.get('code'),status.get('count') \
+                         )
+        if status.get('code') != 'succeeded':
+            title_build = 'Failed'
+    buildstatus += '\n'
+    imgsuccess, imgfailed, imgstatus = 0, 0, ''
+    submitters = info['submitter'] or ""
+    for image in info['images']:
+        imgstatus  += "\n Repository : %s Status : (%s) %s " \
+                      % (image['name'], image['status'], image['url'])
+        if image['status'] == 'success':
+            imgsuccess += 1
+        if image['status'] == 'failed':
+            imgfailed += 1
+    imgstatus += "\n success (%s)  failed (%s) \n" % (imgsuccess, imgfailed)
+    if imgfailed == 0:
+        title_images = 'Succeeded'
+    else:
+        title_images = 'Failed'
+
+    obs_url = os.path.join( os.getenv("OBS_URL") , "project/show?project=" + project)
+
+    project=''
+    for prj in info['projects']:
+        project += '   \n' + prj
+
+    #make a message
+    title = EMAIL_TITLE % ("TRBS", info['git_tag'], info['download_num'], \
+                           title_build, title_images)
+    email_body = EMAIL_BODY % (submitters, \
+                               info['obs_target_prj'], \
+                               info['git_tag'], \
+                               project, \
+                               buildstatus, \
+                               imgstatus, \
+                               info['download_url'], \
+                               obs_url)
+    msg = 'Hello \n\n' + email_body + EMAIL_FOOTER
+    #print title
+    #print msg
+    submitter = info['submitter']
+    #print submitter
+    if submitter:
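+        # NOTE: the real submitter address is overridden with a fixed one here.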
+        submitter = "onstudy <onstudy@samsung.com>"
+        prepare_mail("%s.env" % os.getenv('BUILD_TAG'), title, msg,
+                     os.getenv('NOREPLY_EMAIL_SENDER'), submitter)
+
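+    # NOTE: posting the status back to Gerrit is disabled below (if False).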
+    if False:
+        mygerrit = Gerrit(os.getenv("GERRIT_HOSTNAME"), \
+                          os.getenv("GERRIT_USERNAME"), \
+                          os.getenv("GERRIT_SSHPORT"))
+        try:
+            mygerrit.review(commit=info['git_commit'], message=msg)
+        except GerritError, err:
+            print >> sys.stderr, 'Error posting review comment '\
+                                 'back to Gerrit: %s' % str(err)
+            # return 1 if this exception is not caused by invalid commit
+            if 'no such patch set' not in str(err):
+                return False
+    return True
+
+def main():
+    """The main body"""
+    buildmonitor_enabled = os.getenv("BUILDMONITOR_ENABLED", "0") != "0"
+    print 'buildmonitor_enabled(%s)\n' % (buildmonitor_enabled)
+    if buildmonitor_enabled:
+        bm_start_datetime = datetime.datetime.now()
+
+    obs_api = os.getenv("OBS_API_URL")
+    obs_user = os.getenv("OBS_API_USERNAME")
+    obs_passwd = os.getenv("OBS_API_PASSWD")
+
+    content = trigger_info(os.getenv('TRIGGER_INFO'))
+
+    project = content.get('project', '')
+    name = content.get('name')
+    status = content.get('status', '')
+    url = content.get('url', '')
+    # add image_created log for iris
+    pub_enabled = os.getenv("IRIS_PUB_ENABLED", "0") != "0"
+    if pub_enabled:
+        rest = IrisRestClient(
+            os.getenv("IRIS_SERVER"),
+            os.getenv("IRIS_USERNAME"),
+            base64.b64decode(os.getenv('IRIS_PASSWORDX', '')))
+        rest.publish_event("image_created", {
+        "project": project,
+        "status": status,
+        "name": name,
+        "url": url,
+        })
+
+    # If the project is trbs project, trigger the post-image-creation
+    if is_trbs_project(project):
+        build = BuildService(obs_api, obs_user, obs_passwd)
+
+        saveinfo = build.get_info(project)
+        if saveinfo.get('download_num', 0) != content.get('download_num', 1):
+            print 'EXCEPTION* mismatch download_num current:%s, obs:%s' \
+                  % (content.get('download_num'), saveinfo.get('download_num'))
+            print 'SKIP UPDATING BUILD INFO'
+            return
+
+        info = {"images":
+                    [{"name": name,
+                     "status": status,
+                     "url": url}]
+                }
+        build.update_info(info, project)
+
+        try:
+            saveinfo = build.get_info(project)
+            print "images count : %s %s \n" % ( len(saveinfo['images']), saveinfo['images_count'])
+            if len(saveinfo['images']) == int(saveinfo['images_count']):
+                # send mail to submitter
+                #send_mail_buildstatus_message(saveinfo,project)
+
+                # trigger next job
+                trigger_next('TEST-AUTOMATION', saveinfo)
+
+            if buildmonitor_enabled:
+                if len(saveinfo['images']) == saveinfo['images_count']:
+                    print '[%s] last image(%s)\n' \
+                          % (__file__, len(saveinfo['images']))
+                    #buildmonitor.end_create_image_for_sr_stage(bm_start_datetime,
+                    #                                           project)
+                    bm_stage = 'Post_Image'
+                    bm_data = {"bm_stage" : bm_stage,
+                               "bm_start_datetime" : str(bm_start_datetime),
+                               "project" : project,
+                              }
+                    trigger_next("BUILD-MONITOR", bm_data)
+        except Exception, err:
+            print 'exception while checking images_count: %s' % str(err)
+            if buildmonitor_enabled:
+                print '[%s] last image(%s)\n' \
+                      % (__file__, len(saveinfo['images']))
+                #buildmonitor.end_create_image_for_sr_stage(bm_start_datetime,
+                #                                           project)
+                bm_stage = 'Post_Image'
+                bm_data = {"bm_stage" : bm_stage,
+                           "bm_start_datetime" : str(bm_start_datetime),
+                           "project" : project,
+                          }
+                trigger_next("BUILD-MONITOR", bm_data)
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/trbs/job_trbs_submit.py b/trbs/job_trbs_submit.py
new file mode 100755 (executable)
index 0000000..fc5463d
--- /dev/null
@@ -0,0 +1,849 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (C) 2010, 2011, 2012, 2013, 2014 Intel, Inc.
+#
+#    This program is free software; you can redistribute it and/or
+#    modify it under the terms of the GNU General Public License
+#    as published by the Free Software Foundation; version 2
+#    of the License.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+#
+"""
+This job is triggered by Gerrit RefUpdate event.
+"""
+
+import os
+import sys
+import json
+import re
+import xml.etree.cElementTree as ElementTree
+from xml.sax.saxutils import escape
+from time import sleep
+import datetime
+
+from osc import core
+from gitbuildsys.errors import ObsError
+
+from gbp.rpm import SpecFile
+from gbp.git.repository import GitRepositoryError
+from gbp.errors import GbpError
+
+sys.path.insert(1, os.path.join(sys.path[0], '..'))
+
+from common import utils
+from common.mapping import git_obs_map, get_ref_map, git_virtual_branch_map
+from common.git import Git, clone_gitproject
+from common.upload_service import upload_obs_service, UploadError
+from common.buildservice import BuildService
+from common.buildtrigger import trigger_info, trigger_next, get_jenkins_instance
+from common.send_mail import prepare_mail
+from common.gerrit import Gerrit, get_gerrit_event, GerritError, is_ref_deleted, GerritEnv
+from common.trbs import get_trbs_project_name, trbs_enabled, is_trbs_project, get_ref_prj_from_trbs_name
+
+WRONG_DATE_MSG = '- The date %s in the tag does NOT follow the correct format.\n You can'\
+                 ' use the shell command "date --utc +%%Y%%m%%d.%%H%%M%%S" to '\
+                 'generate it, like 20120801.083113.'
+
+UNDER_REVIEW_MSG = '- Submission %s has been rejected because tagged commit %s'\
+                   ' is still under review in gerrit.\n Please re-trigger '\
+                   'submission after your change is accepted.'
+
+WRONG_COMMIT_MSG = '- The commit %s the tag is attached to does NOT exist in the git'\
+                   ' tree or gerrit open changes.\n Please make sure the commit has '\
+                   'been pushed to gerrit with the correct magical ref refs/for/branch, '\
+                   'then re-submit the tag.'
+
+UNKNOWN_FORMAT_MSG = '- Unknown tag format,\n please follow the format '\
+                     'submit/{version}/{date.time}.'
+
+WRONG_FORMAT_MSG = '- Wrong tag format,\n please follow the format '\
+                   'submit/{branch}/{date.time}. \n'\
+                   'Git branch : %s. Tag branch: %s'
+
+NOT_ANNOTATED_MSG = '- Tag should be annotated tag.'
+
+SUGGESTION = 'Suggest using "gbs submit" to trigger the submission.\n'
+
+TITLE_FAILED = '[Submit Request Failed]: tag: %s in %s'
+
+TITLE_SUCCESS = '[Submit Request Success]: tag: %s in %s'
+
+PRERELEASE_EMAIL_BODY = 'A SR (Submit Request) has been triggered to submit ' \
+             'the commit to OBS project.\n' \
+             'Please check the following details.\n\n' \
+             'Git branch : %s\n' \
+             'Git commit : %s\n\n' \
+             'Git path : %s\n' \
+             'Git tag : %s\n' \
+             'Submitter : %s\n'
+
+EMAIL_FOOTER = '\n\n--------------------------------------------------------\n'\
+               'Automatically generated by backend service.\n'\
+               'Please DO NOT Reply!'
+
+def send_mail(title, msg, receiver):
+    """ post message back to gerrit and send mail to tag owner """
+    print 'msg %s' % msg
+    if 'author' in receiver and 'email' in receiver:
+        msg = 'Hi, %s,\n\n' % receiver['author'] + msg + EMAIL_FOOTER
+        prepare_mail("%s.env" % os.getenv('BUILD_TAG'), title, msg,
+                     os.getenv('NOREPLY_EMAIL_SENDER'), receiver['email'])
+
+def parse_submit_tag(tag):
+    """parse info from submit tag name"""
+
+    branch = None
+    date = None
+
+    if tag.startswith('submit/'):
+        pos = tag.rfind('/', len('submit/'))
+        if pos != -1:
+            branch = tag[len('submit/'):pos]
+            if branch == 'trunk':
+                branch = 'master'
+            date = tag[pos+1:]
+
+    return branch, date
+
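+# Illustration (not part of the job logic): parse_submit_tag splits on the
+# last '/', so
+#   parse_submit_tag('submit/tizen/20170602.062155')
+#       -> ('tizen', '20170602.062155')
+#   parse_submit_tag('submit/tizen/base/20170602.062155')
+#       -> ('tizen/base', '20170602.062155')   # the branch part may contain '/'
+#   parse_submit_tag('submit/trunk/20170602.062155')
+#       -> ('master', '20170602.062155')       # 'trunk' is aliased to 'master'
+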
+def find_submit_tag(event, mygit):
+    """find the corresponding submit tag for this event"""
+
+    if event['event_type'] == 'ref-updated':
+        tag = event['refname'][len('refs/tags/'):]
+        event['branch'] = parse_submit_tag(tag)[0]
+        # Since patchset_revision is used in gerrit feedback, a real tag check
+        # is needed; note that parse_submit_tag cannot guarantee the tag
+        # exists either.
+        try:
+            event['patchset_revision'] = mygit.rev_parse('%s^{commit}' % tag)
+        except GitRepositoryError:
+            tag = None
+    elif event['event_type'] == 'change-merged':
+        # for chanage-merged, search submit tag on this commit
+        branch = event['branch']
+        if event['branch'] == 'master':
+            branch = 'trunk'
+        try:
+            tag = mygit.describe(event['patchset_revision'],
+                                 pattern='submit/%s/*' % branch,
+                                 exact_match=True)
+        except GitRepositoryError:
+            # no submit tag found on this commit, return None
+            tag = None
+
+    return tag
+
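+# Illustration (not part of the job logic): for a change merged to 'master'
+# the lookup above is roughly equivalent to
+#   git describe --match 'submit/trunk/*' --exact-match <patchset_revision>
+# which only succeeds when an annotated submit tag points at that commit.
+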
+def check_tag_format(git, mygerrit, event, tag):
+    """check whether tag follow proper format"""
+
+    branch, date = parse_submit_tag(tag)
+    message = []
+    psr = event['patchset_revision']
+
+    # check tag name format
+    if branch and date:
+        # check date format
+        pattern = re.compile(r'^[0-9]{8}\.[0-9]{6}$')
+        if not pattern.match(date):
+            message.append(WRONG_DATE_MSG % date)
+
+        if not git.branch_contains(tag):
+            # Check if change is still under review
+            cmd = '--current-patch-set status: open project: %s commit: %s' % \
+                      (event['project'], psr)
+            gerritinfo = mygerrit.query(cmd)
+            if len(gerritinfo) == 1 and 'number' in gerritinfo[0] \
+                    and 'currentPatchSet' in gerritinfo[0]:
+                if gerritinfo[0]['branch'] == branch:
+                    # the tagged commit is still open, abort this submission
+                    message.append(UNDER_REVIEW_MSG % (tag, psr))
+            else:
+                # cannot find tagged commit in git tree or gerrit open change
+                message.append(WRONG_COMMIT_MSG % psr)
+        else:
+            # check for contains branch
+            contain = False
+            cbrch = git.branch_contains(tag)
+            if branch in cbrch:
+                print '%s branch is contained in %s branches' % (branch, cbrch)
+                contain = True
+            else:
+                vbrch = git_virtual_branch_map()
+                for items in vbrch:
+                    if branch in items and \
+                        items[branch] is not None \
+                        and items[branch] in cbrch:
+                        print '%s is a virtual branch of %s' % (branch, items[branch])
+                        contain = True
+            if not contain:
+                # wrong tag format
+                print '%s branch is not contained in %s branches' % (branch, cbrch)
+                message.append(WRONG_FORMAT_MSG % (cbrch, branch))
+    else:
+        # wrong tag format
+        message.append(UNKNOWN_FORMAT_MSG)
+
+    # check whether tag is an annotated tag
+    tagger = git.get_tag(tag)
+    if 'author' not in tagger or 'email' not in tagger:
+        message.append(NOT_ANNOTATED_MSG)
+
+    # post comment to gerrit and send email if check failed
+    if message:
+        msg = 'The tag %s was pushed, but it was not completed because of '\
+                'the following reason(s):\n\n' % tag + '\n'.join(message)
+
+        if len(message) != 1 or (message[0] != UNDER_REVIEW_MSG % (tag, psr) \
+           and message[0] != WRONG_COMMIT_MSG % psr):
+            msg += '\n\n' + SUGGESTION
+
+        print msg
+        return False
+
+    return True
+
+def find_specfile(prj_dir, packaging_dir, tag, event, tagger, pkg_name=None):
+    """search specfile under packaging directory"""
+
+    msg = ''
+
+    if pkg_name:
+        spec = '%s/%s/%s.spec' % (prj_dir, packaging_dir, pkg_name)
+        if not os.path.isfile(spec):
+            msg = "The tag %s pushed, but backend service can not find %s \n"\
+                    "under packaging directory, which is caused by mistake \n"\
+                    "OBS_PACKAGE parameter in scm/git-obs-mapping project. \n"\
+                    "Please correct it or contact system administrator for \n"\
+                    "more details." % (tag, os.path.basename(spec))
+    else:
+        specs = utils.find_spec('%s/%s' % (prj_dir, packaging_dir))
+        if not specs:
+            # no spec exists under packaging
+            msg = "The tag %s was pushed, but the packaging directory doesn't \n"\
+                    "contain any spec file. Please create one and \n"\
+                    "re-submit it." % tag
+        elif len(specs) == 1:
+            # only one spec exist under packaging
+            spec = specs[0]
+        else:
+            # multiple specs exist under packaging, use default name
+            spec = '%s/%s/%s.spec' % (prj_dir, packaging_dir, \
+                    os.path.basename(event['project']))
+            if not os.path.isfile(spec):
+                msg = "The tag %s pushed, but packaging directory contains \n"\
+                        "multiply spec files, backend service can not decide \n"\
+                        "which spec file to use. Please use OBS_PACKAGE \n"\
+                        "parameter in scm/git-obs-mapping project to specify \n"\
+                        "the target spec file or contact system \n"\
+                        "administrator for more details." % tag
+
+    if msg:
+        print msg
+        return None
+    else:
+        print 'specfile %s' % spec
+        return spec
+
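+# Illustration (not part of the job logic): find_specfile resolves the spec in
+# this order:
+#   1. <packaging>/<OBS_PACKAGE>.spec when OBS_PACKAGE is mapped,
+#   2. the single spec file found under packaging/,
+#   3. <packaging>/<basename of the gerrit project>.spec as a tie-breaker
+#      when several spec files exist.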
+
+def parse_specfile(specfile, tag, event, tagger):
+    """parse specfile"""
+
+    spec = None
+
+    try:
+        # use gbp to parse specfile
+        spec = SpecFile(specfile)
+    except GbpError, err:
+        print 'gbp parse spec failed. %s' % err
+        msg = 'The tag %s was pushed, but the backend service failed to parse %s. \n'\
+                'Please try "gbs export" on this tag and make sure it can '\
+                'work.\n\n'\
+                'Error message:\n'\
+                '%s' % (tag, os.path.basename(specfile), err)
+        print msg
+        return None
+
+    return spec
+
+class LocalError(Exception):
+    """Local error exception."""
+    pass
+
+
+def change_release_name(build, project, git_tag):
+    """
+    Change release name from project config in OBS
+    Add the datetime into release name.
+    Eg: 'Release: <CI_CNT>.<B_CNT>' ----> 'Release: 20141010.<CI_CNT>.<B_CNT>'
+    """
+    # get project config
+    config = build.get_project_config(project)
+    release_name = 'Release: %s' % (git_tag.split('/')[-1])
+    res = re.findall(r'^Release: ?\S+$', config, flags=re.MULTILINE)
+    if res:
+        if git_tag.split('/')[-1] not in res[0]:
+            note = '#Insert time from submission into release name\n'
+            release_name = '%s.%s' % (release_name,
+                                      res[0].split('Release:')[1].strip())
+            config = config.replace(res[0], '%s%s' % (note, release_name), 1)
+    else:
+        note = '#Add release name into prjconf\n'
+        config = note + '%s\n' % release_name + config
+
+    #Add rpmbuild stage option
+    if os.getenv('PRERELEASE_RPMBUILD_STAGE'):
+        # Only the 'ba' and 'bb' stages are supported
+        if os.getenv('PRERELEASE_RPMBUILD_STAGE') not in ('ba', 'bb'):
+            print 'Error: invalid PRERELEASE_RPMBUILD_STAGE %s' % (os.getenv('PRERELEASE_RPMBUILD_STAGE'))
+        else:
+            rpmbuildstage = 'Rpmbuildstage: %s' % (os.getenv('PRERELEASE_RPMBUILD_STAGE'))
+            res = re.findall(r'^Rpmbuildstage: ?\S+$', config, flags=re.MULTILINE)
+            if res:
+                config = config.replace(res[0], '%s' % (rpmbuildstage), 1)
+            else:
+                config = config + '#Add RpmbuildStage option into prjconf\n' + '%s\n' % (rpmbuildstage)
+
+    # Add "CopyLinkedPackages: yes" for prerelease projects.
+    if not re.search("CopyLinkedPackages:", config):
+      config = config + "\nCopyLinkedPackages: yes\n";
+
+    # set project config
+    build.set_project_config(project, config)
+
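+# Illustration (not part of the job logic): for git_tag
+# 'submit/tizen/20170602.062155' the prjconf rewrite above behaves like
+#   before: Release: <CI_CNT>.<B_CNT>
+#   after:  #Insert time from submission into release name
+#           Release: 20170602.062155.<CI_CNT>.<B_CNT>
+# and 'CopyLinkedPackages: yes' is appended when it is missing.
+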
+def copy_person_project_meta(build, obs_target_prj, obs_project):
+    """copy the maintainer list from obs_target_prj meta to corresponding
+       prerelease project
+    """
+    src_meta_xml = build.get_meta(obs_target_prj)
+    src_xml_root = ElementTree.fromstringlist(src_meta_xml)
+    # get peron list from obs_target_prj meta
+    person_dict = {}
+    for person in src_xml_root.findall('person'):
+        if person.get('userid') in person_dict:
+            person_dict[person.get('userid')].append(person.get('role'))
+        else:
+            person_dict[person.get('userid')] = [person.get('role')]
+    # add person to prerelease project
+    if person_dict:
+        build.addPerson(obs_project, person_dict)
+
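+# Illustration (not part of the job logic): the project meta parsed above
+# carries maintainers as <person/> entries ('someuser' is a made-up id):
+#   <project name="...">
+#     <person userid="someuser" role="maintainer"/>
+#     <person userid="someuser" role="bugowner"/>
+#   </project>
+# copy_person_project_meta() collects them into
+# {'someuser': ['maintainer', 'bugowner']} and replays that onto the
+# prerelease project.
+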
+def create_related_packages(build, obs_target_prj, obs_pre_prj, pre_package):
+    """create the 'link' package that relate the original package
+       obs_target_prj is the base project
+       obs_pre_prj is the prelease project
+       pre_package is the original package
+    """
+    sourceinfo = build.get_sourceinfo_list(obs_target_prj)
+    for package in sourceinfo:
+        if sourceinfo[package]:
+            link_prj, link_pkg = sourceinfo[package][-1].split('/')
+            if link_prj == obs_target_prj and link_pkg == pre_package:
+                build.create_link_pac(obs_pre_prj, pre_package, \
+                                       obs_pre_prj, package)
+        if re.search("_aggregate", package):
+            print "Copypac aggregate package: %s/%s" %(obs_pre_prj, package)
+            build.create_copy_pac(obs_target_prj, package, obs_pre_prj,\
+                package)
+            aggregate_file_name="_aggregate"
+            build.get_source_file(obs_target_prj, package, aggregate_file_name)
+            content = ""
+            with open(aggregate_file_name, 'r') as f:
+                content = f.read()
+            content_xml_root = ElementTree.fromstringlist(content)
+            for element in content_xml_root.findall('aggregate'):
+                element.set('project', obs_pre_prj)
+            content = ElementTree.tostring(content_xml_root)
+            with open(aggregate_file_name, 'w') as f:
+                f.write(content)
+            commit_msg="uploaded to copy pac %s/%s from %s" % (obs_pre_prj, package, obs_target_prj)
+            try:
+                build.commit_files(obs_pre_prj, package,
+                       [(aggregate_file_name, True)], commit_msg)
+            except ObsError, error:
+                raise UploadError("Unable to upload _aggregate to %s: %s" % \
+                    (obs_pre_prj, error))
+            print "Copypac done."
+
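+# Illustration (not part of the job logic): an _aggregate file is an OBS XML
+# snippet whose 'project' attributes get rewritten above to point at the
+# prerelease project ('some-package' is a made-up name):
+#   <aggregatelist>
+#     <aggregate project="Tizen:Unified">   <!-- becomes obs_pre_prj -->
+#       <package>some-package</package>
+#     </aggregate>
+#   </aggregatelist>
+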
+def get_base_project(build, _project):
+    _meta = build.get_meta(_project)
+    xml_meta = ElementTree.fromstringlist(_meta)
+    for repos in xml_meta.findall('repository'):
+        for path in repos.findall('path'):
+            if 'base' not in path.get('project').lower():
+                continue
+            print 'Found base project %s for %s' % (path.get('project'), \
+                                                    _project)
+            return path.get('project')
+    raise LocalError("Cannot determine base project for %s" % _project)
+
+def create_project(build, obs_project, args):
+    """Create prerelease OBS project and upload sources for the build."""
+
+    # Create review project if it doesn't exist
+    print "Creating temporary review OBS project %s" % obs_project
+
+    git_url = args[0]['url']
+    git_project = [t['gerrit_project'] for t in args]
+    git_tag = args[0]['git_tag']
+    git_revision = [t['gerrit_newrev'] for t in args]
+    obs_target_prj = args[0]['obs_target_prj']
+    submitter = ','.join(set(t['submitter'] for t in args))
+    package = [t['package'] for t in args]
+    print '\nCREATING....%s\n%s\n%s\n%s\n%s\n' % (obs_project, git_project, git_revision, submitter, package)
+
+    info = {'projects': git_project,
+            'packages': package,
+            'obs_target_prj': obs_target_prj,
+            'git_tag': git_tag,
+            'git_commit': git_revision,
+            'obs_url': os.path.join(os.getenv('OBS_URL'), \
+                                    'project/show?project=%s' % obs_project),
+            'chksnap' : '0',
+            'images': [],
+            'base': get_base_project(build, obs_target_prj)}
+
+    if build.exists(obs_project):
+        if submitter:
+            info['submitter'] = submitter
+
+        # update project info
+        build.update_info(info, obs_project)
+        # unlink the project to upload packages
+        try:
+            build.unlink_project(obs_project)
+        except ObsError, error:
+            print 'Modify the meta conf to unlink failed: %s' % error
+    else:
+        if submitter:
+            info['submitter'] = escape(submitter)
+
+        if not build.exists(obs_target_prj):
+            raise LocalError("Target project %s doesn't exist" % obs_target_prj)
+        try:
+            build.create_project(obs_project, obs_target_prj,
+                                 description=json.dumps(info))
+        except ObsError, error:
+            LocalError("Unable to create project %s: %s" % (obs_project, error))
+    # change release name of project config in OBS
+    change_release_name(build, obs_project, git_tag)
+
+    #disable publish flag
+    build.disable_build_flag(obs_project, repo = None, flag="publish", status="disable")
+
+    #disable build flag
+    build.disable_build_flag(obs_project, repo = None, flag="build", status="disable")
+
+    try:
+        for idx, val in enumerate(git_project):
+            upload_obs_service(git_url, git_project[idx], git_tag,
+                               git_revision[idx], obs_project, build, package[idx])
+    except UploadError, err:
+        raise LocalError(err)
+
+#    build.link_project(obs_project, src=obs_target_prj, linktype="localdep")
+
+    # copy the maintainer list from obs_target_prj meta to corresponding
+    # prerelease project
+    copy_person_project_meta(build, obs_target_prj, obs_project)
+
+    #create the 'link' package that relate the original package
+    for idx, val in enumerate(package):
+        create_related_packages(build, obs_target_prj, obs_project, package[idx])
+
+    #Wait 10 seconds to upload the package to the OBS
+    sleep(10)
+
+    #default build flag
+    build.default_build_flag(obs_project, repo = None, flag="build")
+
+    #default publish flag
+    build.default_build_flag(obs_project, repo = None, flag="publish")
+
+    # Enabled link project
+    sleep(1)
+    build.link_project(obs_project, src=obs_target_prj, linktype="localdep")
+
+def get_branch_name(tag):
+    """Get branch name by parsing info
+       from submit tag name.
+    """
+    branch = None
+    if tag.startswith('submit/'):
+        pos = tag.rfind('/', len('submit/'))
+        if pos != -1:
+            branch = tag[len('submit/'):pos]
+            if branch == 'trunk':
+                branch = 'master'
+    return branch
+
+queued_requests = {} # key would be project name (home:trbs:...)
+def enqueue_request(url, gerrit_project, git_tag, gerrit_newrev,
+                    build, obs_target_prj, project, submitter, package):
+    #TODO: Check group submits...
+    enqueue_item = {'url': url, \
+                    'gerrit_project': gerrit_project, \
+                    'git_tag': git_tag, \
+                    'gerrit_newrev': gerrit_newrev, \
+                    'obs_target_prj': obs_target_prj, \
+                    'project': project, \
+                    'submitter': submitter, \
+                    'package': package}
+    if project in queued_requests:
+        queued_requests[project].append(enqueue_item)
+    else:
+        queued_requests[project] = [enqueue_item]
+    print 'QUEUED... %s\n' % enqueue_item
+
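+# Illustration (not part of the job logic): after a few submissions
+# queued_requests maps each prerelease project name to its request items,
+# along the lines of
+#   {'home:trbs:...:submit:tizen:20170602.062155':
+#       [{'url': ..., 'gerrit_project': ..., 'git_tag': ..., 'package': ...},
+#        ...]}   # several items when packages are submitted as a group
+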
+def process_requests(build):
+    fail_count = 0
+
+    for x in queued_requests:
+        print '\n[%s]:\n  %s' % (x, queued_requests[x])
+
+    for x in queued_requests:
+        args = queued_requests[x]
+        retry_count = 3
+        while retry_count > 0:
+            try:
+                create_project(build, x, args)
+                break
+            except Exception, err:
+                print '******* obs operation failed %s, retrying...' % err
+                retry_count -= 1
+                sleep(10)
+        if not retry_count:
+            print 'retrying failed'
+            fail_count += 1
+
+    return fail_count
+
+
+def entry(action):
+
+    if len(action) > 1 and action[1] == 'bypass':
+        print '\n** Forward %s to group routine' % os.getenv('GERRIT_REFNAME')
+        with open('TRBS-SUBMIT-GROUP_%d.env' % int(os.getenv('BUILD_NUMBER')), 'w') as info_f:
+            for x in ['EVENT_TYPE', 'EVENT_HASH', 'REFNAME', 'PROJECT', 'OLDREV', 'NEWREV', \
+                      'EVENT_ACCOUNT', 'EVENT_ACCOUNT_NAME', 'EVENT_ACCOUNT_EMAIL']:
+                info_f.write('%s=%s\n' % ('GERRIT_' + x, os.getenv('GERRIT_' + x)))
+                print '    %s=%s' % (x, os.getenv('GERRIT_' + x))
+        return
+
+    obs_api = os.getenv("OBS_API_URL")
+    obs_user = os.getenv("OBS_API_USERNAME")
+    obs_passwd = os.getenv("OBS_API_PASSWD")
+
+    build = BuildService(obs_api, obs_user, obs_passwd)
+
+    # First store current build and all the rest queued builds
+    build_queue = []
+    job = os.getenv('JOB_NAME')
+    cred = {'url': os.getenv('REMOTE_TRBS_JENKINS_URL'), \
+            'username': os.getenv('REMOTE_TRBS_JENKINS_USER'), \
+            'password': os.getenv('REMOTE_TRBS_JENKINS_PW')}
+    jenkinsinst = get_jenkins_instance(job, cred)
+    curr_build = jenkinsinst.get_job(job).get_build(int(os.getenv('BUILD_NUMBER')))
+    up_id = curr_build.get_upstream_build_number()
+    if up_id is None: up_id = 0
+    curr_data = {}
+    for k in curr_build.get_actions()['parameters']:
+        curr_data[k['name']] = k['value']
+    build_queue.append({'%d-%d' % (up_id, curr_build._data['queueId']): curr_data})
+    #TODO: Keep original process
+    if up_id != 0:
+        jq = jenkinsinst.get_queue()
+        for pending_build in jq.get_queue_items_for_job(os.getenv('JOB_NAME')):
+            for action in pending_build._data.get('actions', []):
+                if type(action) is dict and 'causes' in action:
+                    up_id = [x.get('upstreamBuild', None) for x in action['causes']][0]
+                    if up_id is None: up_id = 0
+                    break
+            build_queue.append({'%d-%d' % (up_id, pending_build.queue_id): pending_build.get_parameters()})
+            print '\n    %s DELETED: %s' % (pending_build.queue_id, pending_build.get_parameters())
+            jq.delete_item_by_id(pending_build.queue_id)
+
+    build_queue = sorted(build_queue)
+    print '\nThe following will be processed: "%s"' % [int(k.keys()[0].split('-')[0]) for k in build_queue]
+    for k in build_queue:
+        item = k[k.keys()[0]]
+        print '%s : %s(%s)' % (k.keys()[0], \
+                               item['GERRIT_PROJECT'], \
+                               item['GERRIT_NEWREV'])
+
+    event = get_gerrit_event("PUBLIC_")
+    sr_count = 1
+    for k in build_queue:
+        item = k[k.keys()[0]]
+        #TODO: Just forward it to main routine. (Merging group submits need target project name)
+        print item['GERRIT_PROJECT'], item['GERRIT_NEWREV']
+        event.update({'project': item['GERRIT_PROJECT'], \
+                      'refname': item['GERRIT_REFNAME'], \
+                      'oldrev': item['GERRIT_OLDREV'], \
+                      'newrev': item['GERRIT_NEWREV'], \
+                      'event_account': item['GERRIT_EVENT_ACCOUNT'], \
+                      'event_account_name': item['GERRIT_EVENT_ACCOUNT_NAME'], \
+                      'event_account_email' : item['GERRIT_EVENT_ACCOUNT_EMAIL'], \
+                      'event_type': item['GERRIT_EVENT_TYPE'], \
+                      'event_hash': item['GERRIT_EVENT_HASH']})
+        main(build, event, sr_count)
+        sr_count += 1
+
+    return process_requests(build)
+
+def main(build, event, sr_count):
+    """
+    Script entry point.
+    """
+    print '---[JOB STARTED]-------------------------'
+    buildmonitor_enabled = os.getenv("BUILDMONITOR_ENABLED", "0") != "0"
+    print 'buildmonitor_enabled(%s)\n' % (buildmonitor_enabled)
+    if buildmonitor_enabled:
+        bm_start_datetime = datetime.datetime.now()
+    # Triggered by Gerrit - use GERRIT_REFNAME to get tag
+    git_tag = event['refname'].split("refs/tags/")[1]
+    git_branch = get_branch_name(git_tag)
+
+    # prepare separate temp directory for each build
+    gerrit_env = GerritEnv("PUBLIC_")
+    git_cache = gerrit_env.gitcache
+    prjdir = os.path.join(git_cache, event['project'])
+
+    # clone gerrit project to local dir
+    if not clone_gitproject(event['project'], prjdir, \
+                            gerrit_hostname=gerrit_env.hostname, \
+                            gerrit_username=gerrit_env.username, \
+                            gerrit_sshport=gerrit_env.sshport
+                            ):
+        print >> sys.stderr, 'Error cloning %s' % event['project']
+        return 1
+    mygit = Git(prjdir)
+
+    # check whether the tag name starts with 'submit/'
+    if not event['refname'].startswith('refs/tags/submit/'):
+        print '\nREFNAME "%s" doesn\'t start with refs/tags/submit, exit now'\
+              % event['refname']
+        return 0
+    elif is_ref_deleted(event['oldrev'], event['newrev']):
+        print '\nREFNAME "%s" is deleted, exit now' % event['refname']
+        return 0
+
+    mygerrit = Gerrit(gerrit_env.hostname, gerrit_env.username, \
+               gerrit_env.sshport, int(os.getenv('GERRIT_SILENT_MODE')))
+
+    tag = find_submit_tag(event, mygit)
+    if not tag:
+        print '\nThis commit doesn\'t contain a submit/*/* tag, exit now'
+        return 0
+
+    submitter = ''
+    if event['event_account_name']:
+        submitter = event['event_account_name']
+        if event['event_account_email']:
+            submitter += ' <%s>' % event['event_account_email']
+
+
+    # check whether the tag meets the expected format
+    if not check_tag_format(mygit, mygerrit, event, tag):
+        print 'Tag format check failed, exit now\n'
+        return 0
+
+    packagingdir = utils.parse_link('%s/%s' % (prjdir, 'packaging'))
+    print 'packaging dir is %s/%s' % (prjdir, packagingdir)
+
+    # checkout submit tag
+    mygit.checkout(tag)
+
+    tagger = mygit.get_tag(tag)
+
+    # precheck tpk branch (ABS)
+    if '_tpk' in git_branch:
+        git_branch = git_branch.replace('_tpk','')
+
+    # get list of target projects from git-obs-mapping
+    obs_target_prjs = git_obs_map(event['project'], git_branch, \
+                                  gitcache=gerrit_env.gitcache, \
+                                  gerrit_hostname=gerrit_env.hostname, \
+                                  gerrit_username=gerrit_env.username, \
+                                  gerrit_sshport=gerrit_env.sshport
+                                  )
+
+    # get list of ref projects from git-ref-mapping
+    obs_ref_prjs = get_ref_map(event['project'], git_branch, \
+                               gitcache=gerrit_env.gitcache, \
+                               gerrit_hostname=gerrit_env.hostname, \
+                               gerrit_username=gerrit_env.username, \
+                               gerrit_sshport=gerrit_env.sshport
+                               )
+
+    # prepare submit_info
+    submit_info={}
+    submit_info['submitted'] = {
+                "branch": git_branch,
+                "commit_id": event['newrev'],
+                "gitpath": event['project'],
+                "submitter": event['event_account_name'],
+                "submitter_email": event['event_account_email'],
+                "tag": git_tag,
+                }
+    submit_info['pre_created'] = []
+
+    #get project list
+    project_list = build.get_package_list("")
+    #print 'project list = %s' %(project_list)
+
+    for target in obs_target_prjs:
+        enable_build = True
+        obs_target_prj = target['OBS_project']
+        obs_stg_prj = target['OBS_staging_project']
+        obs_pkg = target['OBS_package']
+        if 'config' in target and target['config'] is not None and target['config'].build == 'no':
+            print 'Build disabled for %s' % (obs_target_prj)
+            enable_build = False
+        if not enable_build:
+            print 'Skipping TRBS build because the enable_build flag is %s' % enable_build
+            continue
+
+        url = 'ssh://%s:%s' % (gerrit_env.hostname,
+                               gerrit_env.sshport)
+        # get package name from xml files.
+        if obs_pkg:
+            package = obs_pkg
+        else:
+            # get package name from gerrit_project
+            package = os.path.basename(event['project'])
+
+        if True:
+            if obs_stg_prj != 'prerelease' and obs_stg_prj != 'abs':
+                print "%s is not 'prerelease' project in git-obs-mapping.xml." \
+                      "It has been switched to submitobs workflow." \
+                      "Skipping ..." % obs_stg_prj
+                continue
+
+            # Skip ABS project if selected branch is not _tpk.
+            if obs_stg_prj == 'abs' and not get_branch_name(git_tag).endswith('_tpk'):
+                print "(%s) is an ABS project but the branch(%s) is not _tpk." \
+                      % (package, get_branch_name(git_tag))
+                continue
+
+            if not trbs_enabled(obs_target_prj):
+                print 'Skipping %s as trbs is not enabled for it' % \
+                       obs_target_prj
+                continue
+
+            # Check for Group submit
+            group_prj = [ obj for obj in project_list if is_trbs_project(obj) \
+                          and not obj.find(obs_target_prj) == -1 \
+                          and not obj.find(git_tag.replace('/',':')) == -1 ]
+
+            if not group_prj:
+                # ref project
+                # obs_target_prj is src and obs_ref_prjs is ref project
+                ref_prj_enable = False
+                for reftarget in obs_ref_prjs:
+                    target_prj = reftarget['OBS_project']
+                    ref_prj = reftarget['OBS_staging_project']
+                    pkg = reftarget['OBS_package']
+                    if obs_target_prj == target_prj:
+                        obs_target_prj = ref_prj
+                        ref_prj_enable = True
+                        break
+
+                if not ref_prj_enable:
+                    print 'Skipping %s as trbs(git-ref-mapping.xml) is not enabled for it' % \
+                           obs_target_prj
+                    continue
+                project = get_trbs_project_name(obs_target_prj, git_tag)
+            else:
+                # is group submission
+                project = group_prj[0]
+                # Make build id from latest snapshot + project suffix
+                ref_project = get_ref_prj_from_trbs_name(project, git_tag)
+                obs_target_prj = ref_project
+                print 'group submission : %s target : %s' % (project, obs_target_prj)
+
+            if is_ref_deleted(event['oldrev'], event['newrev']):
+                build.cleanup(project, "Cleaned by job_submit")
+            else:
+                submitter = ''
+                if event['event_account_name']:
+                    submitter = event['event_account_name']
+                    if event['event_account_email']:
+                        submitter += ' <%s>' % event['event_account_email']
+
+                enqueue_request(url, event['project'], git_tag, event['newrev'],
+                                build, obs_target_prj, project, submitter, package)
+                # record the created project in submit_info (avoid duplicates)
+                if project and project not in submit_info['pre_created']:
+                    submit_info['pre_created'].append(project)
+
+            # Disable triggerring make_dep_graph.
+            # Code remained to enable it in the future
+            #data={}
+            #data['obs_project'] = project
+            #data['obs_linked_project'] = obs_target_prj
+            #data['action'] = "prerelease_build_progress"
+            #trigger_next('make_dep_graph', data)
+
+    if buildmonitor_enabled and submit_info['pre_created']:
+        bm_end_datetime = datetime.datetime.now()
+        commit_date, cd_err, cd_ret = mygit._git_inout('show', ['-s', '--format=%ci'])
+        commit_msg, cm_err, cm_ret = mygit._git_inout('show', ['-s', '--format=%s'])
+        submit_date, sd_err, sd_ret = mygit._git_inout('for-each-ref', \
+                    ['--count=1', '--sort=-taggerdate', '--format=%(taggerdate:iso)'])
+        submit_msg, sm_err, sm_ret = mygit._git_inout('for-each-ref', \
+                    ['--count=1', '--sort=-taggerdate', '--format=%(subject)'])
+        submitter, st_err, st_ret = mygit._git_inout('for-each-ref',\
+                    ['--count=1', '--sort=-taggerdate', '--format=%(taggeremail)'])
+        #print '[%s] %s%s' % (__file__, commit_date, commit_msg)
+        #print '[%s] %s%s%s' % (__file__, submit_date, submit_msg, submitter)
+
+        ### sr_status -> sr_commit -> sr_stage & sr_status_detail
+        #buildmonitor.sr_submit_for_sr_status(git_tag)
+        #buildmonitor.sr_submit_for_sr_commit(mygit, git_tag, gerrit_project,
+        #                                     gerrit_newrev, gerrit_account_name)
+        #buildmonitor.package_build_for_sr_detail_sr_stage(git_tag, bm_start_datetime,
+        #                                                  bm_end_datetime,
+        #                                                  submit_info['pre_created'])
+        bm_stage = 'Submit'
+        bm_data = {"bm_stage": bm_stage,
+                   "commit_date" : commit_date,
+                   "commit_msg" : commit_msg,
+                   "submit_date" : submit_date,
+                   "submit_msg" : submit_msg,
+                   "submitter" : submitter,
+                   "git_tag" : git_tag,
+                   "gerrit_project" : event['project'],
+                   "gerrit_newrev" : event['newrev'],
+                   "gerrit_account_name" : event['event_account_name'],
+                   "bm_start_datetime": str(bm_start_datetime),
+                   "bm_end_datetime": str(bm_end_datetime),
+                   "bm_src_project_lst": submit_info['pre_created']
+                    }
+        ### sr_status -> sr_commit -> sr_stage & sr_status_detail
+        trigger_next("BUILD-MONITOR_%d" % sr_count, bm_data)
+    else:
+        print '[%s] submit_info data is NULL(%s)\n' \
+              % (__file__, submit_info['pre_created'])
+
+if __name__ == '__main__':
+    try:
+        sys.exit(entry(sys.argv))
+    except Exception as err:
+        print err
+        sys.exit(1)
+
diff --git a/trbs/job_trbs_sync_aws.py b/trbs/job_trbs_sync_aws.py
new file mode 100755 (executable)
index 0000000..ae0266a
--- /dev/null
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+import sys
+import os
+import re
+import errno
+import subprocess
+
+import shutil, tempfile, atexit
+
+sys.path.insert(1, os.path.join(sys.path[0], '..'))
+
+from common.buildtrigger import trigger_info
+
+def execute_shell(cmd, progress=False):
+    print "[INFO] command : %s" % cmd
+    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    if progress:
+        line_iterator = iter(proc.stdout.readline, b"")
+        for line in line_iterator:
+            print "    %s" % line[:-1]
+    out, err = proc.communicate()
+    if cmd.startswith("rsync"):
+        if err:
+            print "stderr: %s" % err
+            return 'err'
+
+    if err:
+        print "stderr: %s" % err
+        return None
+
+    o = out.strip().split('\n')
+    print "output: %s" % o
+    if len(o) == 1 and o[0] == '':
+        return None
+    return o
+
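+# Illustration (not part of the job logic): execute_shell returns stdout as a
+# list of lines, None when there is no output, and the string 'err' when an
+# rsync invocation writes to stderr, e.g.
+#   execute_shell('echo hello')            # -> ['hello']
+#   execute_shell('true')                  # -> None
+#   execute_shell('rsync --bogus-flag .')  # -> 'err'
+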
+def main():
+
+    print '---[JOB STARTED]-------------------------'
+
+    content = trigger_info(os.getenv("TRIGGER_INFO"))
+
+    if os.getenv("TRBS_SYNC_AWS_ENABLED", "0") == "0":
+        return
+
+    # Resolve the local repo path; later keys take precedence if several are set.
+    repo_path = None
+    if 'download_url' in content:
+        repo_path = content['download_url'].replace(os.getenv('URL_PUBLIC_REPO_BASE'),
+                                                    os.getenv('TRBS_PATH_REPO_BASE'))
+    if 'remove_path' in content:
+        repo_path = content['remove_path']
+    if 'repo_path' in content:
+        repo_path = os.path.join(os.getenv('TRBS_PATH_REPO_BASE'),
+                                 content['repo_path'])
+    if repo_path is None:
+        raise Exception('[Error] TRIGGER_INFO contains no repo path.')
+
+    sync_aws_dest_base = '%s:%s' % (os.getenv('TRBS_SYNC_AWS_DEST_BASE'), repo_path)
+    cmd = 'rsync --compress --stats --archive --recursive --hard-links --delete --verbose --exclude="source" %s/ %s/' \
+             % (repo_path, sync_aws_dest_base)
+    print "rsync command: %s" % cmd
+    if execute_shell(cmd, progress=True) == 'err':
+        # fall back to syncing one directory level up
+        cmd = 'rsync --compress --stats --archive --recursive --hard-links --delete --verbose --exclude="source" %s/ %s/' \
+             % (os.path.dirname(repo_path), os.path.dirname(sync_aws_dest_base))
+        print "rsync command: %s" % cmd
+        if execute_shell(cmd, progress=True) == 'err':
+            raise Exception('[Error] rsync failed.')
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/trbs/job_trbs_test_automation.py b/trbs/job_trbs_test_automation.py
new file mode 100755 (executable)
index 0000000..1002dc4
--- /dev/null
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2010, 2011, 2012, 2013, 2014 Intel, Inc.
+#
+#    This program is free software; you can redistribute it and/or
+#    modify it under the terms of the GNU General Public License
+#    as published by the Free Software Foundation; version 2
+#    of the License.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+#
+"""This script is used to update infomation about images"""
+
+import os
+import sys
+import base64
+import json
+
+sys.path.insert(1, os.path.join(sys.path[0], '..'))
+
+from common.gerrit import get_gerrit_event, GerritEnv
+from common.git import Git, clone_gitproject
+from common.buildtrigger import trigger_next, trigger_info, trigger_jenkins_build, remote_jenkins_build_job
+
+GIT_FILE_NAME = 'prerelease.description'
+
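+# Assumption for illustration: prerelease.description is expected to hold the
+# JSON-encoded prerelease info dict (as the json.loads() below implies), along
+# the lines of
+#   {"projects": [...], "git_tag": "submit/...", "download_url": "...",
+#    "obs_target_prj": "..."}
+# The exact keys come from the job that commits the file.
+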
+def main():
+    """The main body"""
+
+    trigger_info_data = os.getenv('TRIGGER_INFO')
+
+    if trigger_info_data:
+        print 'TRBS test trigger\n'
+        content = trigger_info(trigger_info_data)
+
+        # Check if we've got required field in TRIGGER_INFO
+        for field in ('git_tag', 'download_url', 'projects', 'obs_target_prj'):
+            if field not in content:
+                print 'Error: TRIGGER_INFO doesn\'t contain %s' % field
+                return -1
+
+        test_enabled = os.getenv("TEST_AUTOMATION_ENABLED", "0") != "0"
+        if test_enabled:
+            # trigger sync obs job
+            trigger_jenkins_build(os.getenv('TEST_JOB_NAME'), \
+                                 {'TRIGGER_INFO': os.getenv('TRIGGER_INFO')}, \
+                                 cred={'url': os.getenv('TEST_JENKINS_URL'), \
+                                  'username': os.getenv('TEST_JENKINS_USER'), \
+                                  'password': os.getenv('TEST_JENKINS_PW')}, \
+                                 block=False
+                                 )
+        # TRIGGER NEXT SYNC-AWS
+        if os.getenv("TRBS_SYNC_AWS_ENABLED", "0") != "0":
+            trigger_next('SYNC-AWS', content)
+    else:
+        print 'PUBLIC test trigger\n'
+
+        # check the envs
+        if not os.getenv("PUBLIC_GIT_CACHE_DIR") or \
+           not os.getenv("GERRIT_NEWREV") or \
+           not os.getenv("PUBLIC_TEST_AUTOMATION_ENABLED") or \
+           not os.getenv("PUBLIC_TEST_JENKINS_URL") or \
+           not os.getenv("PUBLIC_TEST_JENKINS_USER") or \
+           not os.getenv("PUBLIC_TEST_JENKINS_API_TOKEN") or \
+           not os.getenv("PUBLIC_TEST_JENKINS_JOB_NAME"):
+            print 'Error: required environment variables are missing!\n'
+            return
+
+        events = get_gerrit_event('PUBLIC_')
+        gerrit_env = GerritEnv('PUBLIC_')
+        git_cache = os.getenv('PUBLIC_GIT_CACHE_DIR')
+        git_prjdir = os.path.join(git_cache, events['project'])
+
+        # clone_gitproject
+        if not clone_gitproject(events['project'], \
+                                git_prjdir,
+                                gerrit_hostname=gerrit_env.hostname, \
+                                gerrit_username=gerrit_env.username, \
+                                gerrit_sshport=gerrit_env.sshport):
+            print >> sys.stderr, 'Error cloning %s' % events['project']
+            return 1
+
+        # get prerelease_description_dict data
+        mygit = Git(git_prjdir)
+        commit_id = os.getenv("GERRIT_NEWREV")
+        arg = commit_id + ':' + GIT_FILE_NAME
+
+        prerelease_description_str = mygit.show(arg)
+        prerelease_description_dict = json.loads(prerelease_description_str)
+        print json.dumps(prerelease_description_dict, indent=4)
+
+        url_lst = os.getenv("PUBLIC_TEST_JENKINS_URL").split(',')
+        user_lst = os.getenv("PUBLIC_TEST_JENKINS_USER").split(',')
+        token_lst = os.getenv("PUBLIC_TEST_JENKINS_API_TOKEN").split(',')
+        name_lst = os.getenv("PUBLIC_TEST_JENKINS_JOB_NAME").split(',')
+
+        # remote trigger by using file param
+        files_dict = {}
+        for each_url, each_user, each_token, each_name in zip(url_lst, user_lst, token_lst, name_lst):
+            #print 'each_url(%s), each_user(%s), each_token(%s), each_name(%s)\n' \
+            #      % (each_url, each_user, each_token, each_name)
+            fname = 'TRIGGER_INFO_FILE_' + each_name
+
+            with open(fname, 'w') as f:
+                f.write(base64.b64encode(json.dumps(prerelease_description_dict)))
+
+            files_dict[each_name] = [(fname, fname)]
+
+            public_test_enabled = int(os.getenv("PUBLIC_TEST_AUTOMATION_ENABLED"))
+            if public_test_enabled:
+                remote_jenkins_build_job(each_url, each_user, each_token, each_name,
+                                         None, None, files_dict[each_name])
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/trbs/job_trbs_update_git.py b/trbs/job_trbs_update_git.py
new file mode 100755 (executable)
index 0000000..4b77056
--- /dev/null
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (C) 2010, 2011, 2012, 2013, 2014 Intel, Inc.
+#
+#    This program is free software; you can redistribute it and/or
+#    modify it under the terms of the GNU General Public License
+#    as published by the Free Software Foundation; version 2
+#    of the License.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+#
+"""This script will pull latest change to local when remote ref updated..
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.join(sys.path[0], '..'))
+
+from common.gerrit import get_gerrit_event, GerritEnv
+from common.git import clone_gitproject
+
+def main():
+    """The main body"""
+
+    print '---[JOB STARTED]----------------------------------------'
+
+    events = get_gerrit_event('TRBS_')
+
+    gerrit_env = GerritEnv('TRBS_')
+
+    # clone gerrit project to local git cache dir
+    clone_gitproject(events['project'],
+            os.path.join(gerrit_env.gitcache, events['project']),
+            bare=True,
+            gerrit_hostname=gerrit_env.hostname,
+            gerrit_username=gerrit_env.username,
+            gerrit_sshport=gerrit_env.sshport)
+
+if __name__ == '__main__':
+    sys.exit(main())