--- /dev/null
+#!/usr/bin/env python
+
+import sys
+import os
+import tempfile
+import atexit
+import shutil
+import urllib2
+import subprocess
+import gzip
+import re
+from common.git import Git, clone_gitproject
+from common.gerrit import Gerrit, get_gerrit_event, GerritError, is_ref_deleted
+from common.buildservice import BuildService
+
+# prepare related global variables
+# Jenkins-provided workspace directory; all temp dirs below are created in it.
+workspace = os.getenv('WORKSPACE')
+# Base URL of the public snapshot server; snapshots live under .../snapshots/tizen/.
+basic_url= os.getenv('URL_PUBLIC_REPO_BASE') + '/snapshots/tizen/'
+# Space-separated list of default profiles supported by gbs (consumed in main()).
+gbs_meta_default_profile = os.getenv('GBS_META_DEFAULT_PROFILE')
+# OBS project this job builds (e.g. 'Tizen:Unified'); all others are skipped.
+TIZEN_PROFILE = os.getenv('TIZEN_PROFILE')
+# Single repository/architecture pair used when all_repo_arch_build == 0.
+REPOSITORY = os.getenv('REPOSITORY')
+ARCHITECTURE = os.getenv('ARCHITECTURE')
+#gbs_ref_fullbuild_root = workspace
+# NOTE(review): hard-coded root instead of the workspace — presumably to reuse
+# gbs caches between runs; confirm the Jenkins node provides this path.
+gbs_ref_fullbuild_root = '/data/jenkins_fullbuild/'
+BUILD_ROOT = gbs_ref_fullbuild_root + '/GBS-ROOT/'
+SRC_ROOT = gbs_ref_fullbuild_root + '/SRC-ROOT/'
+# Base 'gbs build' command line; per-repo/arch options are appended later.
+gbs_default_build_arg='timeout 6h gbs build --threads=28 --define "jobs 8" --define "_smp_mflags -j8" --baselibs --clean-once'
+# Base 'mic' image-creation command; the kickstart path is appended later.
+mic_default_build_arg='sudo mic --non-interactive cr auto '
+# Filled by _update_ref_bin_index() with reference-snapshot data for the profile.
+ref_profile={}
+# 1 = build every repository/arch from the OBS _meta; 0 = only REPOSITORY/ARCHITECTURE.
+all_repo_arch_build=0
+
+
+
+class LocalError(Exception):
+ """Local error exception."""
+ pass
+
+def _update_ref_bin_index(ref_binary):
+    """Resolve reference-snapshot data for TIZEN_PROFILE into ref_profile.
+
+    Clones scm/meta/obs, reads the OBS _meta of the project matching
+    TIZEN_PROFILE, and fills the module-level ref_profile dict with the
+    profile name, manifest branch, snapshot URLs and repository/arch lists.
+
+    :param ref_binary: dict mapping OBS project name -> '...ref:<snapshot id>'
+    :return: 1 on clone error; None on success
+    """
+    print "-----------------------------------------------------"
+    print "Update Reference Binary Index"
+    print "-----------------------------------------------------"
+
+    # prepare separate temp directory for each build
+    git_prj = 'scm/meta/obs'
+    git_branch="master"
+    tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+    atexit.register(shutil.rmtree, tmpdir)
+    prjdir = os.path.join(tmpdir, git_prj)
+
+    # clone gerrit project to local dir
+    if not clone_gitproject(git_prj, prjdir):
+        print >> sys.stderr, 'Error cloning %s' %git_prj
+        return 1
+    mygit = Git(prjdir)
+    mygit.checkout(git_branch)
+
+    for obs_prj in ref_binary.keys():
+        # Only the project this job instance is configured for is processed.
+        if obs_prj != TIZEN_PROFILE:
+            continue
+        print '\nobs_prj: %s' %obs_prj
+        profile = obs_prj.split(':')[-1].lower()
+
+        # Derive version string, URL path component and manifest branch from
+        # the project name (e.g. 'Tizen:Unified' vs 'Tizen:4.0:Unified').
+        if obs_prj.split(':')[-2] == 'Tizen':
+            t_ver = ''
+            t_ver_path = '/'
+            t_branch = 'tizen'
+        else:
+            t_ver = obs_prj.split(':')[-2] + '-'
+            t_ver_path = '/' + obs_prj.split(':')[-2] + '/'
+            t_branch = 'tizen_'+obs_prj.split(':')[-2]
+
+        basic_snapshot_url = basic_url + t_ver + profile + "/"
+        ref_snapshot_number = ref_binary[obs_prj].split('ref:')[1]
+        # 'latest' is a literal directory on the snapshot server; a concrete
+        # snapshot id lives under 'tizen-<ver><profile>_<id>/'.
+        if ref_snapshot_number == 'latest':
+            ref_snapshot_url = basic_snapshot_url + ref_snapshot_number + "/"
+        else:
+            ref_snapshot_url = basic_snapshot_url + "tizen-" + t_ver + profile + "_" + ref_snapshot_number + "/"
+
+        # Scrape repository names and their architectures from the OBS _meta
+        # XML by line matching (not a real XML parse — assumes one tag per line).
+        repository = []
+        arch_list= {}
+        obs_meta_file = prjdir + '/' + obs_prj + '/_meta'
+        lines = open(obs_meta_file).readlines()
+        for line in lines:
+            if line.find('repository name=') != -1:
+                repo_tmp=line.split('"')[1]
+                repository.append(repo_tmp)
+                arch_list[repo_tmp] = []
+            if line.find('<arch>') != -1:
+                arch_tmp = line.split("<arch>")[1].split("</arch>")[0]
+                arch_list[repo_tmp].append(arch_tmp)
+
+        # Publish the results for the later build/image steps via module global.
+        ref_profile['profile']=profile
+        ref_profile['obs_prj']=obs_prj
+        ref_profile['t_ver']=t_ver
+        ref_profile['t_ver_path']=t_ver_path
+        ref_profile['t_branch']=t_branch
+        ref_profile['basic_snapshot_url']=basic_snapshot_url
+        ref_profile['ref_snapshot_number'] = ref_snapshot_number
+        ref_profile['ref_snapshot_url'] = ref_snapshot_url
+        ref_profile['repository'] = repository
+        ref_profile['arch_list'] = arch_list
+
+    print 'reference profile %s' %ref_profile
+
+
+def replace_depanneur_file():
+    """Revert local patches in /usr/bin/depanneur back to upstream code.
+
+    Writes a small helper script into the workspace and runs it with sudo
+    (root is required to modify /usr/bin/depanneur).  The helper replaces
+    two Thread::Queue-based patch hunks with the original serial perl code,
+    only if both hunks are present in the installed file.
+    """
+    tmp_python_file=workspace+'/tmp_python'
+    # NOTE: this string is written verbatim to disk and executed; the perl
+    # snippets (including their exact whitespace and escaped backslashes)
+    # must match the patched depanneur source byte-for-byte — do not edit
+    # without re-checking against /usr/bin/depanneur.
+    pytext="""
+#!/usr/bin/env python
+
+import sys
+import os
+import subprocess
+
+def replace_depanneur():
+    print 'start'
+    patch1='$packs_queue->enqueue({\\n\\
+            filename => "$pkg_path/$cache_key/$spec_file",\\n\\
+            project_base_path => $base,\\n\\
+            });'
+    origin1='push(@packs, {\\n\\
+            filename => "$pkg_path/$cache_key/$spec_file",\\n\\
+            project_base_path => $base,\\n\\
+            });'
+    patch2='my $packs_queue = Thread::Queue->new();\\n\\
+\\tmy $data_queue = Thread::Queue->new();\\n\\
+\\tforeach my $pack (@pre_packs) {\\n\\
+        if ($not_export_source == 1) {\\n\\
+            my $name = basename($pack->{"project_base_path"});\\n\\
+            my $r = grep /^$name$/, @not_export;\\n\\
+            if ($vmtype eq "kvm") {\\n\\
+                $r = 0;\\n\\
+            }\\n\\
+            if ($r) {\\n\\
+                info("skip export $name for accel...");\\n\\
+                push @packs, $pack;\\n\\
+            } else {\\n\\
+                info("package $name not support skip export source");\\n\\
+                $data_queue->enqueue($pack);\\n\\
+            }\\n\\
+        } else {\\n\\
+            $data_queue->enqueue($pack);\\n\\
+        }\\n\\
+\\t}\\n\\
+\\n\\
+\\tmy $thread_num = int(sysconf(SC_NPROCESSORS_ONLN));\\n\\
+\\tfor (0..$thread_num) {\\n\\
+\\t\\t$data_queue->enqueue(undef);\\n\\
+\\t\\tthreads->create(sub {\\n\\
+\\t\\t\\twhile (my $pack = $data_queue->dequeue()) {\\n\\
+\\t\\t\\t\\tprepare_git($config, $pack->{"project_base_path"}, $pack->{"filename"}, $packs_queue);\\n\\
+\\t\\t\\t}\\n\\
+\\t\\t});\\n\\
+\\t}\\n\\
+\\tforeach (threads->list()) { $_->join(); }\\n\\
+\\t# Check error\\n\\
+\\tforeach (threads->list()) {\\n\\
+\\t\\tif (my $chk_err = $_->error()){\\n\\
+\\t\\t\\twarning("export thread error: $chk_err\\\\n");\\n\\
+\\t\\t}\\n\\
+\\t}\\n\\
+\\t$packs_queue->enqueue(undef);\\n\\
+\\twhile (my $pack = $packs_queue->dequeue()) {\\n\\
+\\t\\tpush @packs, $pack;\\n\\
+\\t}'
+
+    origin2='foreach my $pack (@pre_packs) {\\n\\
+        prepare_git($config, $pack->{"project_base_path"}, $pack->{"filename"});\\n\\
+    }'
+
+    depanneur_file='/usr/bin/depanneur'
+
+    f = open(depanneur_file,'rb')
+    text = f.read()
+    if text.find(patch1) != -1 and text.find(patch2) != -1:
+        print depanneur_file+' will be replaced'
+        f.close()
+        new_text=text.replace(patch1,origin1).replace(patch2,origin2)
+        f = open(depanneur_file,'wb')
+        f.write(new_text)
+        f.close()
+
+def main():
+    replace_depanneur()
+
+if __name__ == "__main__":
+    sys.exit(main())
+"""
+
+    # Write the helper script, then run it as root.
+    f = open(tmp_python_file,'wb')
+    f.write(pytext)
+    f.close()
+
+    cmd='sudo python '+tmp_python_file
+    subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+
+
+def _do_repo_init_sync(repo):
+    """Check out the sources for one repository via 'repo init' + 'repo sync'.
+
+    Uses the manifest branch and profile recorded in ref_profile by
+    _update_ref_bin_index(); sources end up under SRC_ROOT/<repo>.
+    """
+    print '----start _do_repo_init_sync-----------------------------------'
+
+    repo_src_root=SRC_ROOT+'/'+repo
+    if not os.path.exists(repo_src_root):
+        os.mkdir(repo_src_root)
+    # 'repo' operates on the current working directory.
+    os.chdir(repo_src_root)
+
+    #add '-u' option
+    repo_init_arg = ' -u https://git.tizen.org/cgit/scm/manifest'
+
+    #add '-b' option
+    repo_init_arg += ' -b '+ref_profile['t_branch']
+
+    #add '-m' option: manifest file is named <profile>_<repo>.xml
+    repo_init_arg += ' -m '+ref_profile['profile']+'_'+repo+'.xml'
+
+    #do repo init
+    cmd = 'repo init' + repo_init_arg
+    print 'repo init cmd: %s' %cmd
+    subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+
+    #do repo sync
+    print 'do repo sync'
+    cmd = 'repo sync'
+    subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+
+def ___get_index_file_name(repo,arch):
+ profile_list=os.listdir(BUILD_ROOT+'/local/repos/')
+ for profile in profile_list:
+ if profile.find(repo) != -1:
+ profile_path=profile
+
+ index_file=BUILD_ROOT+'/local/repos/'+profile_path+'/'+arch+'/index.html'
+
+ return index_file
+
+def __is_gbs_build_finished(repo,arch):
+ index_file=___get_index_file_name(repo,arch)
+ if os.path.exists(index_file):
+ return 1
+
+ return 0
+
+def __is_gbs_fullbuild_result_fail(repo,arch):
+    """Parse the gbs build report; return 1 on package failures, 0 on success.
+
+    Compares the total package count with the succeeded package count from
+    the summary table of the index.html report.
+    """
+    print '----start __is_gbs_fullbuild_result_fail-----------------------------------'
+
+    index_file=___get_index_file_name(repo,arch)
+
+    f = open(index_file,'rb')
+    build_result=f.read()
+    f.close()
+
+    #summary is contents between 'Build Status Summary' and 'Build Statis Details'
+    # NOTE(review): 'Build Statis Details' looks misspelled but presumably
+    # matches the heading gbs actually emits — confirm before "fixing" it.
+    summary=build_result.split('Build Status Summary')[1].split('Build Statis Details')[0]
+
+    # First two <td> cells of the summary table: total and succeeded counts.
+    # String comparison is fine here since only equality matters.
+    total_pkg_num=summary.split('<td>')[1]
+    succeeded_pkg_num=summary.split('<td>')[2]
+
+    if total_pkg_num == succeeded_pkg_num:
+        print 'GBS fullbuild succeeded'
+        return 0
+    else:
+        print 'There are errors on gbs fullbuild'
+        return 1
+
+def _do_repo_arch_gbs_fullbuild(repo,arch):
+ print '----start _do_repo_arch_gbs_fullbuild-----------------------------------'
+
+ #add arch
+ gbs_build_arg = ' -A '+arch
+
+ #add build root
+ gbs_build_arg += ' -B '+BUILD_ROOT
+
+ cmd = gbs_default_build_arg+gbs_build_arg+' | awk \'{ print strftime("%Y-%m-%d %H:%M:%S"), $0; fflush(); }\''
+ print 'gbs build argument is %s' %cmd
+
+ retry_cnt=3
+ while retry_cnt > 0:
+ subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+ if __is_gbs_build_finished(repo,arch):
+ print 'gbs fullbuild for repo:%s arch:%s is finished.' %(repo,arch)
+ break
+ else:
+ print 'gbs fullbuild for repo:%s arch:%s is stucked. retrying....' %(repo,arch)
+
+ retry_cnt -= 1
+
+ if __is_gbs_fullbuild_result_fail(repo,arch):
+ raise LocalError('There are errors on GBS fullbuild for repo:%s, arch:%s' %(repo,arch))
+
+
+def do_gbs_build():
+    """Run the gbs fullbuild(s) according to the build mode.
+
+    Wipes any previous GBS-ROOT, then either builds every repository and
+    architecture listed in ref_profile (all_repo_arch_build == 1) or just
+    the single REPOSITORY/ARCHITECTURE pair from the environment.
+    """
+    print '----start do_gbs_build-----------------------------------'
+
+    # Start from a clean build root; it may contain root-owned files, hence sudo.
+    if os.path.exists(BUILD_ROOT):
+        cmd = 'sudo rm -rf '+BUILD_ROOT
+        subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+
+    if not os.path.exists(SRC_ROOT):
+        os.mkdir(SRC_ROOT)
+
+    if all_repo_arch_build == 1:
+        for repo in ref_profile['repository']:
+            _do_repo_init_sync(repo)
+            for arch in ref_profile['arch_list'][repo]:
+                print 'OBS Project: %s, repository: %s, architecture: %s gbs fullbuild start' %(ref_profile['obs_prj'],repo,arch)
+                _do_repo_arch_gbs_fullbuild(repo,arch)
+    else:
+        _do_repo_init_sync(REPOSITORY)
+        _do_repo_arch_gbs_fullbuild(REPOSITORY,ARCHITECTURE)
+
+    #shutil.rmtree(SRC_ROOT, ignore_errors=False, onerror=None)
+
+def _ks_add_local_repo(remote_ks_content,repo):
+ print '----start _ks_add_local_repo-----------------------------------'
+
+ if ref_profile['ref_snapshot_url'].split('/')[-1]:
+ ref_snapshot_build_id = ref_profile['ref_snapshot_url'].split('/')[-1]
+ else:
+ ref_snapshot_build_id = ref_profile['ref_snapshot_url'].split('/')[-2]
+
+ remote_ks_content=remote_ks_content.replace('@BUILD_ID@',ref_snapshot_build_id)
+ profile_list=os.listdir(BUILD_ROOT+'/local/repos/')
+ for profile in profile_list:
+ if profile.find(repo) != -1:
+ profile_path=profile
+
+ local_repo = []
+ gbs_built_rpm_path=os.listdir(BUILD_ROOT+'/local/repos/'+profile_path)
+ print 'gbs built rpm path: %s' %gbs_built_rpm_path
+ for arch in gbs_built_rpm_path:
+ local_repo.append({'baseurl':'--baseurl=file://'+BUILD_ROOT+'/local/repos/'+profile+'/'+arch+'/'})
+ local_repo[-1]['name']='--name=local_'+arch
+ print 'local repo[] = %s' %local_repo
+
+ local_repo_line=''
+ for line in remote_ks_content.split('\n'):
+ if line.find('repo --name=') != -1 and line.find(ref_profile['profile']) != -1:
+ print 'psk-test %s' %line
+ for arg in line.split(' '):
+ if arg.find('baseurl=') != -1:
+ remote_url=arg
+ elif arg.find('name=') != -1:
+ remote_name=arg
+ for local_repo_arch in local_repo:
+ if all_repo_arch_build == 1:
+ local_repo_line+=line.replace(remote_url,local_repo_arch['baseurl']).replace(remote_name,local_repo_arch['name'])+'\n'
+ else:
+ local_repo_line+=line.replace(remote_url,local_repo_arch['baseurl']).replace(remote_name,local_repo_arch['name'])+' --priority=1\n'
+ if all_repo_arch_build == 1:
+ local_ks_content=remote_ks_content.replace(line,local_repo_line)
+ else:
+ local_ks_content=remote_ks_content.replace(line,line+' --priority=99\n'+local_repo_line)
+
+ return local_ks_content
+
+def _do_mic_build(ks_path):
+# cmd = 'gbs createimage --ks-file='+ks_path
+ cmd = mic_default_build_arg+ks_path
+ print 'mic build command is %s' %cmd
+ ret = subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+ if ret:
+ raise LocalError('Image creation error for ks:%s' %ks_path)
+
+
+def do_image_creation():
+    """Create images for every .ks file published with the reference snapshot.
+
+    Downloads the image-configuration kickstart files from the reference
+    snapshot, points their repo lines at the locally built RPMs and runs
+    mic for each of them.
+    """
+    print '----start do_image_creation-----------------------------------'
+
+    # Temp dir cleanup is intentionally disabled (commented out), presumably
+    # so the generated ks files remain available for debugging.
+    tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+# atexit.register(shutil.rmtree, tmpdir)
+    ks_dir=tmpdir+'/ks_files/'
+    os.mkdir(ks_dir)
+
+    for repo in ref_profile['repository']:
+        image_url=ref_profile['ref_snapshot_url']+'/builddata/images/'+repo+'/image-configurations/'
+        res=urllib2.urlopen(image_url)
+        # Crude HTML scrape: hrefs are the quote-delimited tokens of the
+        # server's directory index page.
+        for line in res.read().split('"'):
+            if (all_repo_arch_build == 1 and line.find('.ks') != -1 and line.find('</a>') == -1)\
+                or (all_repo_arch_build == 0 and line.find('.ks') != -1 and line.find('</a>') == -1 and line.find(ARCHITECTURE) != -1):
+                ks_file=line
+                ks_url=image_url+ks_file
+                print 'ks url:%s' %ks_url
+                f = open(ks_dir+ks_file,'wb')
+                remote_ks_content=urllib2.urlopen(ks_url).read()
+                local_ks_content=_ks_add_local_repo(remote_ks_content,repo)
+                print 'file content:============================================================='
+                print local_ks_content
+                print '=========================================================================='
+                f.write(local_ks_content)
+                f.close()
+                _do_mic_build(ks_dir+ks_file)
+
+
+def main():
+ """script entry point"""
+
+ print '---[JOB STARTED]----------------------------------------'
+
+ event = get_gerrit_event()
+ # prepare separate temp directory for each build
+
+ if event['event_type'] != "ref-updated" or event['project'] != "scm/git-ref-mapping":
+ # This is just a sanity check as ref-updated is the only event we
+ # react on and it's configured in the job configuraion
+ print >> sys.stderr, "Configuration error: This job can't process"\
+ "project %s! Only scm/git-ref-mapping is allowed and " \
+ "event %s! Only ref-updated events are allowed" \
+ %(event['event_type'], event['project'])
+ return 1
+
+ git_prj = event['project']
+ git_branch = event['refname']
+ filename = 'git-ref-mapping.xml'
+ tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+ atexit.register(shutil.rmtree, tmpdir)
+ prjdir = os.path.join(tmpdir, git_prj)
+
+ # clone gerrit project to local dir
+ if not clone_gitproject(git_prj, prjdir):
+ print >> sys.stderr, 'Error cloning %s' %git_prj
+ return 1
+ mygit = Git(prjdir)
+ mygit.checkout(git_branch)
+
+ ref_binary = {}
+
+ lines = open(prjdir+'/'+filename).readlines()
+ for line in lines:
+ if line.find('branch OBS_project') != -1:
+ ref_binary[line.split('"')[1]] = line.split('"')[3]
+
+ #Add default profile supported by gbs
+ for profile in gbs_meta_default_profile.split(' '):
+ ref_binary[profile]=profile+':ref:latest'
+
+ print 'Each reference snapshot numbers are like below'
+ print ref_binary
+
+ _update_ref_bin_index(ref_binary)
+
+ replace_depanneur_file()
+ do_gbs_build()
+ do_image_creation()
+
+
+if __name__ == '__main__':
+    # Exit code: 1 on configuration/clone errors, 0 (None) otherwise.
+    sys.exit(main())
+