--- /dev/null
+#!/usr/bin/python
+#
+# Copyright (C) 2010, 2011, 2012, 2013, 2014 Intel, Inc.
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+
+import sys
+import os
+import subprocess
+import re
+import shutil
+import tempfile
+import atexit
+import urllib2
+import glob
+from common.git import Git, clone_gitproject
+from common.utils import sync
+
+class RuntimeException(Exception):
+ """Local error handler"""
+ pass
+
+class UtilsError(Exception):
+ """Local error handler"""
+ pass
+
+class LocalError(Exception):
+ """Local error exception."""
+ pass
+
+
+def gbs_conf_prefix(t_ver,profile,build_repository):
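+    """Return the [general] section and the profile-section banner of a generated .gbs.conf."""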
+ #[general] section
+ gbs_conf_text = '[general]\nfallback_to_native = true\nprofile = profile.'+t_ver+profile+'_'+build_repository+'\n\n\n'
+
+ #Profile section
+ gbs_conf_text += '################## Profile Section ##################\n\n'
+
+ return gbs_conf_text
+
+
+class GBSBuild:
+ """A class which supports with statement"""
+
+ def __init__(self, gbsdbbuild_project_id, obs_prj, basic_url, snapshot_num, full_dep_sel, new_pkg_list, rm_pkg_list, gbsbuild_workspace):
+ """ Initialize GBSBuild class"""
+ print "-----------------------------------------------------"
+ print "Initialize GBS Build Class"
+ print "-----------------------------------------------------"
+
+ workspace = os.getenv('WORKSPACE')
+
+ # prepare separate temp directory for each build
+ git_prj = 'scm/meta/obs'
+ git_branch="master"
+ tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+ atexit.register(shutil.rmtree, tmpdir)
+ prjdir = os.path.join(tmpdir, git_prj)
+
+ # clone gerrit project to local dir
+ if not clone_gitproject(git_prj, prjdir):
+ print >> sys.stderr, 'Error cloning %s' %git_prj
+            raise LocalError('Error cloning %s' % git_prj)
+ mygit = Git(prjdir)
+ mygit.checkout(git_branch)
+
+ if len(re.findall('\D', obs_prj.split(':')[1].replace('.',''))) != 0:
+ t_ver = ''
+ t_ver_path = '/'
+ t_branch = 'tizen'
+ profile_array = obs_prj.split(':')[1:]
+ else:
+ t_ver = obs_prj.split(':')[1] + '-'
+ t_ver_path = '/' + obs_prj.split(':')[1] + '/'
+ t_branch = 'tizen_'+obs_prj.split(':')[-2]
+ profile_array = obs_prj.split(':')[2:]
+
+ #find profile name
+        profile = '-'.join(part.lower() for part in profile_array).rstrip('-')
+
+ basic_snapshot_url = basic_url + t_ver + profile + "/"
+ if snapshot_num == 'latest':
+ snapshot_url = basic_snapshot_url + snapshot_num + "/"
+ else:
+ snapshot_url = basic_snapshot_url + "tizen-" + t_ver + profile + "_" + snapshot_num + "/"
+
+ if snapshot_url.split('/')[-1]:
+ snapshot_build_id = snapshot_url.split('/')[-1]
+ else:
+ snapshot_build_id = snapshot_url.split('/')[-2]
+
+ repository = []
+ arch_list= {}
+ path_prj_list= {}
+ obs_meta_file = prjdir + '/' + obs_prj + '/_meta'
+ lines = open(obs_meta_file).readlines()
+ for line in lines:
+ if line.find('repository name=') != -1:
+ repo_tmp=line.split('"')[1]
+ repository.append(repo_tmp)
+ arch_list[repo_tmp] = []
+ path_prj_list[repo_tmp] = []
+ if line.find('<arch>') != -1:
+ arch_tmp = line.split("<arch>")[1].split("</arch>")[0]
+ arch_list[repo_tmp].append(arch_tmp)
+ if line.find('path project=') != -1:
+ path_prj_arch = line.split('"')[1]+'##'+line.split('"')[3]
+ path_prj_list[repo_tmp].append(path_prj_arch)
+
+ prj_src_root = os.path.join(gbsbuild_workspace,'SRC-ROOT',obs_prj)
+ build_root = os.path.join(gbsbuild_workspace,'GBS-ROOT')
+ live_root = os.path.join(gbsbuild_workspace,'live')
+
+ if full_dep_sel == 'full build':
+ is_fullbuild = 'true'
+ else:
+ is_fullbuild = 'false'
+
+ build_pkg_list = {}
+ if new_pkg_list:
+ item_is_git_name = 'true'
+ for item in new_pkg_list.split(' '):
+ if item_is_git_name == 'true':
+ git_name = item
+ item_is_git_name = 'false'
+ else:
+ commit_id = item
+ item_is_git_name = 'true'
+ build_pkg_list[git_name] = commit_id
+
+ rm_pkg_list_array = []
+ if rm_pkg_list:
+ for git_name in rm_pkg_list.split(' '):
+ rm_pkg_list_array.append(git_name)
+
+ self.profile = profile
+ self.obs_prj = obs_prj
+ self.t_ver = t_ver
+ self.t_ver_path = t_ver_path
+ self.t_branch = t_branch
+ self.basic_snapshot_url = basic_snapshot_url
+ self.snapshot_num = snapshot_num
+ self.snapshot_url = snapshot_url
+ self.snapshot_build_id = snapshot_build_id
+ self.repository = repository
+ self.arch_list = arch_list
+ self.path_prj_list = path_prj_list
+ self.prj_src_root = prj_src_root
+ self.build_root = build_root
+ self.live_root = live_root
+ self.basic_url = basic_url
+ self.is_fullbuild = is_fullbuild
+ self.build_pkg_list = build_pkg_list
+ self.rm_pkg_list_array = rm_pkg_list_array
+ self.gbsdbbuild_project_id = gbsdbbuild_project_id
+
+ print 'profile: %s,obs_prj: %s,t_ver: %s,t_ver_path: %s,t_branch: %s,\
+ basic_snapshot_url: %s,snapshot_num: %s,snapshot_url: %s,snapshot_build_id: %s,\
+ repository: %s,arch_list: %s, path_prj_list: %s,prj_src_root: %s\
+ build_root: %s, basic_url: %s, is_fullbuild: %s, build_pkg_list: %s'\
+ %(profile,obs_prj,t_ver,t_ver_path,t_branch,basic_snapshot_url,snapshot_num,\
+ snapshot_url,snapshot_build_id,repository,arch_list,path_prj_list,prj_src_root,\
+ build_root,basic_url,is_fullbuild,build_pkg_list)
+
+ def convert_gbsbuild_to_dictionary(self):
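+        """Return all GBS build settings as a plain dictionary (e.g. to pass as job trigger data)."""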
+ dic = {}
+
+ dic['profile'] = self.profile
+ dic['obs_prj'] = self.obs_prj
+ dic['t_ver'] = self.t_ver
+ dic['t_ver_path'] = self.t_ver_path
+ dic['t_branch'] = self.t_branch
+ dic['basic_snapshot_url'] = self.basic_snapshot_url
+ dic['snapshot_num'] = self.snapshot_num
+ dic['snapshot_url'] = self.snapshot_url
+ dic['snapshot_build_id'] = self.snapshot_build_id
+ dic['repository'] = self.repository
+ dic['arch_list'] = self.arch_list
+ dic['path_prj_list'] = self.path_prj_list
+ dic['prj_src_root'] = self.prj_src_root
+ dic['build_root'] = self.build_root
+ dic['live_root'] = self.live_root
+ dic['basic_url'] = self.basic_url
+ dic['is_fullbuild'] = self.is_fullbuild
+ dic['build_pkg_list'] = self.build_pkg_list
+ dic['rm_pkg_list_array'] = self.rm_pkg_list_array
+ dic['gbsdbbuild_project_id'] = self.gbsdbbuild_project_id
+
+ return dic
+
+
+ def ___build_pkg_list_apply(self,manifest_content):
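+        """Apply build_pkg_list additions/updates and rm_pkg_list removals to a
+        snapshot manifest and return the modified manifest XML."""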
+ print '----start ___build_pkg_list_apply-----------------------------------'
+
+ build_pkg_list_new = {}
+ build_pkg_list_replace = {}
+
+ # build_pkg_list_new : git_name & commit_id for new packages
+ # build_pkg_list_replace : git_name & commit_id for already existing packages --> commit id has to be replaced
+ for git_name in self.build_pkg_list.keys():
+ if manifest_content.find(git_name) != -1:
+ build_pkg_list_replace[git_name] = self.build_pkg_list[git_name]
+ else:
+ build_pkg_list_new[git_name] = self.build_pkg_list[git_name]
+
+ # for build_pkg_list_replace, line which contains git_name has to be replaced to contain new commit id
+ replaced_manifest_content = ''
+ for each_line in manifest_content.split('\n'):
+ for git_name in build_pkg_list_replace.keys():
+ if each_line.find(git_name) != -1:
+ each_line = ' <project name="%s" path="%s" revision="%s"/>'\
+ %(git_name,git_name,build_pkg_list_replace[git_name])
+ break
+
+ rm_pkg_found = 'false'
+ for git_name in self.rm_pkg_list_array:
+ if each_line.find(git_name) != -1:
+ rm_pkg_found = 'true'
+ break
+ if rm_pkg_found == 'true':
+ continue
+
+ if each_line.find('</manifest>') != -1:
+ continue
+
+ replaced_manifest_content += each_line+'\n'
+
+ # for build_pkg_list_new, line has to be added newly
+ for git_name in build_pkg_list_new.keys():
+ replaced_manifest_content += ' <project name="%s" path="%s" revision="%s"/>\n'\
+ %(git_name,git_name,build_pkg_list_new[git_name])
+
+ replaced_manifest_content += '</manifest>'
+
+ return replaced_manifest_content
+
+
+ def __manifest_replace(self,repo):
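+        """Download the snapshot manifest for a repository, apply the package
+        changes and write the result over the local repo manifest (projects.xml)."""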
+ print '----start __manifest_replace-----------------------------------'
+ manifest_file = self.prj_src_root+'/'+repo+'/.repo/manifests/'+self.profile+'/'+repo+'/projects.xml'
+ snapshot_manifest_url = self.snapshot_url+'/builddata/manifest/'+self.snapshot_build_id+'_'+repo+'.xml'
+ print 'snapshot_manifest_url : %s' %snapshot_manifest_url
+ res = urllib2.urlopen(snapshot_manifest_url)
+ snapshot_manifest_content = res.read()
+ new_snapshot_manifest_content = self.___build_pkg_list_apply(snapshot_manifest_content)
+ f = open(manifest_file,'wb')
+ f.write(new_snapshot_manifest_content)
+ f.close()
+
+
+ def _get_base_repo_in_gbs_conf(self,path_prj_arch,repo):
+ """ Get base repo url from obs project & repository"""
+ print "-------_get_base_repo_in_gbs_conf start-----------------"
+
+ daily_release_url= os.getenv('URL_PUBLIC_REPO_BASE') + '/releases/daily/tizen/'
+ basic_url = self.basic_url
+
+ base_repo = {}
+ obs_prj=path_prj_arch.split('##')[0]
+ path_arch=path_prj_arch.split('##')[1]
+ path_prj=obs_prj.replace('Tizen:','').replace(':','-').lower()
+
+ if obs_prj.find(':ref:') != -1:
+ # Case of specific base snapshot number used for path project is specified in obs meta
+ ref_build_id=obs_prj.split(':ref:')[1]
+ obs_prj=obs_prj.split(':ref:')[0]
+ snapshot_id=obs_prj.replace('Tizen:','').replace(':','-').lower()
+            temp_id = '/'+snapshot_id+'/tizen-'+snapshot_id+'_'+ref_build_id+'/repos/'+path_arch
+            url_candidates = []
+            url_candidates.append(basic_url+temp_id)
+            url_candidates.append(daily_release_url+temp_id)
+            base_url = ''
+            for url in url_candidates:
+                try:
+                    urllib2.urlopen(url)
+                except Exception:
+                    continue
+                else:
+                    base_url = url
+                    break
+            if not base_url:
+                raise LocalError('No reachable base repository found for %s' % obs_prj)
+ else:
+            # Case of Tizen:Base / Tizen:4.0:Base / Tizen:3.0:Base
+ # Must find base snapshot id which is lower than snapshot_num
+ snapshot_id=path_prj
+ res = urllib2.urlopen(basic_url + '/'+snapshot_id+'/')
+ base_url_list = res.read()
+ prev_build_id = '0'
+ base_build_id = '0'
+ for line in base_url_list.split('\n'):
+ if line.find('<a href="') != -1 and line.find('tizen-'+snapshot_id) != -1:
+ build_id=line.split('"')[1].replace('tizen-'+snapshot_id+'_','').rstrip('/')
+ if float(build_id) >= float(self.snapshot_num):
+ if prev_build_id == '0':
+ base_build_id = build_id
+ else:
+ base_build_id = prev_build_id
+ break
+ else:
+ base_build_id = build_id
+ prev_build_id = build_id
+ base_url = basic_url+'/'+snapshot_id+'/tizen-'+snapshot_id+'_'+base_build_id+'/repos/'+path_arch
+
+ print 'obs_prj: %s, snapshot_id: %s' %(obs_prj,snapshot_id)
+ print 'path_prj: %s, path_arch: %s, base_url: %s' %(path_prj,path_arch,base_url)
+
+ base_repo['repo_name']='repo.'+self.t_ver+self.profile+'_'+repo+'__'+path_prj+'_'+path_arch
+ base_repo['debug_repo_name']='repo.'+self.t_ver+self.profile+'_'+repo+'__'+path_prj+'_'+path_arch+'_debug'
+ base_repo['repo_url']=base_url+'/packages/'
+ base_repo['debug_repo_url']=base_url+'/debug/'
+
+ print 'base_repo: %s' %base_repo
+
+ return base_repo
+
+
+ def ___get_profile_repo_section_for_one_profile_repo(self,repo):
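+        """Return the [profile.*] and [repo.*] sections of .gbs.conf for one repository."""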
+ #profile section
+ profile = self.profile
+ t_ver = self.t_ver
+ t_ver_path = self.t_ver_path
+ snapshot_url = self.snapshot_url
+
+ gbs_conf_text = '############# '+ t_ver+profile+' #############\n'
+ gbs_conf_text += '[profile.'+t_ver+profile+'_'+repo+']\n'
+ if self.is_fullbuild == 'true':
+ gbs_conf_text += 'buildconf=./scm/meta/build-config'\
+ +t_ver_path+profile+'/'+repo+'_build.conf\n'
+ gbs_conf_text += 'repos = '
+ for path_prj_arch in self.path_prj_list[repo]:
+ base_repo = self._get_base_repo_in_gbs_conf(path_prj_arch,repo)
+ gbs_conf_text += base_repo['repo_name']+','+base_repo['debug_repo_name']+','
+ if self.is_fullbuild == 'false':
+ gbs_conf_text += 'repo.'+t_ver+profile+'_'+repo+','
+ gbs_conf_text += 'repo.'+t_ver+profile+'_'+repo+'_debug,'
+ gbs_conf_text = gbs_conf_text[:-1]
+
+ gbs_conf_text += '\n\n'
+
+ ####repo section####
+ gbs_conf_text += '\n####################### Repo Section#######################\n'
+
+ #base repo
+ gbs_conf_text += '\n############# base #############\n'
+
+ for path_prj_arch in self.path_prj_list[repo]:
+ base_repo = self._get_base_repo_in_gbs_conf(path_prj_arch,repo)
+ gbs_conf_text += '['+base_repo['repo_name']+']\n'
+ gbs_conf_text += 'url = '+base_repo['repo_url']+'\n'
+ gbs_conf_text += '['+base_repo['debug_repo_name']+']\n'
+ gbs_conf_text += 'url = '+base_repo['debug_repo_url']+'\n\n'
+
+ #profile repo
+ gbs_conf_text += '\n############# '+ t_ver+profile+' #############\n'
+ gbs_conf_text += '[repo.'+t_ver+profile+'_'+repo+']\n'
+ gbs_conf_text += 'url = '+snapshot_url+'repos/'+repo+'/packages/\n'
+ gbs_conf_text += '[repo.'+t_ver+profile+'_'+repo+'_debug]\n'
+ gbs_conf_text += 'url = '+snapshot_url+'repos/'+repo+'/debug/\n\n'
+
+ return gbs_conf_text
+
+
+ def __update_gbs_conf_for_one_profile_repo(self,repo):
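+        """Regenerate the .gbs.conf used for building one repository."""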
+ print '----start __update_gbs_conf-----------------------------------'
+
+ gbs_conf_file = self.prj_src_root+'/'+repo+'/.gbs.conf'
+ if os.path.exists(gbs_conf_file):
+ os.remove(gbs_conf_file)
+
+ gbs_conf_text = gbs_conf_prefix(self.t_ver,self.profile,repo)
+ gbs_conf_text += self.___get_profile_repo_section_for_one_profile_repo(repo)
+
+ print gbs_conf_text
+
+ f = open(gbs_conf_file,'wb')
+ f.write(gbs_conf_text)
+ f.close()
+
+
+ def _do_repo_init_sync(self,repo):
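+        """Run 'repo init' and 'repo sync' for one repository, with the snapshot
+        manifest applied, then refresh its .gbs.conf."""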
+ print '----start _do_repo_init_sync-----------------------------------'
+ repo_src_root=self.prj_src_root+'/'+repo
+ if not os.path.exists(repo_src_root):
+ os.mkdir(repo_src_root)
+ os.chdir(repo_src_root)
+
+ #add '-u' option
+ repo_init_arg = ' -u ssh://%s:%s/scm/manifest' %(os.getenv('GERRIT_HOSTNAME_EXTERNAL'),
+ os.getenv('GERRIT_SSHPORT'))
+ #add '-b' option
+ repo_init_arg += ' -b '+self.t_branch
+ #add '-m' option
+ repo_init_arg += ' -m '+self.profile+'_'+repo+'.xml'
+
+ #do repo init
+ cmd = 'repo init' + repo_init_arg
+ print 'repo init cmd: %s' %cmd
+ ret = subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+ if ret != 0:
+            raise LocalError('repo init failed')
+
+ self.__manifest_replace(repo)
+
+ #do repo sync
+ print 'do repo sync'
+ cmd = 'repo sync'
+ ret = subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+ if ret != 0:
+ raise LocalError('repo sync failed')
+
+ self.__update_gbs_conf_for_one_profile_repo(repo)
+
+
+ def __is_gbs_fullbuild_result_fail(self,repo,arch):
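+        """Parse the gbs-generated index.html and return 1 if any package failed to build, 0 otherwise."""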
+ print '----start __is_gbs_fullbuild_result_fail-----------------------------------'
+
+ profile_list=os.listdir(self.build_root+'/local/repos/')
+ for profile in profile_list:
+ if profile.find(repo) != -1:
+ profile_path=profile
+
+ index_file=self.build_root+'/local/repos/'+profile_path+'/'+arch+'/index.html'
+
+ f = open(index_file,'rb')
+ build_result=f.read()
+ f.close()
+
+ #summary is contents between 'Build Status Summary' and 'Build Statis Details'
+ summary=build_result.split('Build Status Summary')[1].split('Build Statis Details')[0]
+
+ total_pkg_num=summary.split('<td>')[1]
+ succeeded_pkg_num=summary.split('<td>')[2]
+
+ if total_pkg_num == succeeded_pkg_num:
+ print 'GBS fullbuild succeeded'
+ return 0
+ else:
+ print 'There are errors on gbs fullbuild'
+ return 1
+
+
+ def __find_binary_list_arg(self,repo):
+ """ Find argument of --binary-list : which is same as to find spec file name in build_pkg_list"""
+ print '----start __find_binary_list_arg-----------------------------------'
+ binary_list = ''
+ for git_name in self.build_pkg_list.keys():
+ for each_file in os.listdir(os.path.join(self.prj_src_root,repo,git_name,'packaging')):
+ if each_file.endswith('.spec'):
+                    binary_list += each_file[:-len('.spec')]+','
+ binary_list = binary_list.rstrip(',')
+
+ print 'build spec file list: %s' %binary_list
+ return binary_list
+
+
+ def _do_repo_arch_gbs_fullbuild(self,repo,arch):
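+        """Run 'gbs build' for one repository and architecture and raise LocalError on failure."""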
+ print '----start _do_repo_arch_gbs_fullbuild-----------------------------------'
+
+ gbs_default_build_arg='timeout 6h gbs build --threads=16 --define "jobs 8" --define "_smp_mflags -j8" --baselibs --clean-once'
+
+ #add arch
+ gbs_build_arg = ' -A '+arch
+
+ #add build root
+ gbs_build_arg += ' -B '+self.build_root
+
+ #for dependency build, add --rdeps and --binary-list
+ if self.is_fullbuild == 'false':
+ binary_list_arg = self.__find_binary_list_arg(repo)
+ gbs_build_arg += ' --rdeps --binary-list='+binary_list_arg
+
+ cmd = gbs_default_build_arg+gbs_build_arg\
+ +' | awk \'{ print strftime("%Y-%m-%d %H:%M:%S"), $0; fflush(); }\''
+ print 'gbs build argument is %s' %cmd
+
+ subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+
+ if self.__is_gbs_fullbuild_result_fail(repo,arch):
+ # TRIGGER NEXT BUILD-MONITOR
+# update_message="GBS Fullbuild Failed"
+# if len(update_message) < 119:
+# trigger_next("BUILD-MONITOR", \
+# {'bm_stage':'GBSFULLBUILD_SNAPSHOT',
+# 'snapshot_name':ref_profile['ref_snapshot_build_id'],
+# 'gbsfullbuild_string': update_message})
+
+ raise LocalError('There are errors on GBS fullbuild for repo:%s, arch:%s' %(repo,arch))
+
+
+ def do_gbs_build(self):
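+        """Sync sources and run gbs build for every repository and architecture of the project."""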
+ print '----start do_gbs_build-----------------------------------'
+
+
+ if not os.path.exists(self.prj_src_root):
+ os.mkdir(self.prj_src_root)
+
+ for repo in self.repository:
+# for repo in ['standard']:
+ self._do_repo_init_sync(repo)
+ for arch in self.arch_list[repo]:
+# for arch in ['armv7l']:
+ print 'OBS Project: %s, repository: %s, architecture: %s gbs build start'\
+ %(self.obs_prj,repo,arch)
+ self._do_repo_arch_gbs_fullbuild(repo,arch)
+
+
+ def copy_build_results_to_dl_server(self):
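+        """Rearrange GBS build results (RPMs, SRPMs, build logs) into an OBS-live-repo-like
+        layout and rsync them to the download server."""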
+ print '----start copy_build_results_to_dl_server-----------------------------------'
+
+ path_live_repo_base = os.getenv('PATH_LIVE_REPO_BASE').rstrip('/')
+ RSYNC_LIVE = os.getenv('IMG_SYNC_DEST_BASE')+'/'+os.path.basename(path_live_repo_base)
+ rpm_arch_type_list = 'aarch64 armv7l i586 i686 noarch vanish x86_64'
+
+ live_root = self.live_root
+        #reconstruct gbs build results into a structure similar to the OBS live repo
+ if os.path.exists(live_root):
+ cmd = 'sudo rm -rf '+live_root
+ subprocess.call(cmd, stdout=sys.stdout,stderr=sys.stderr, shell=True)
+
+ os.mkdir(live_root)
+
+ obs_liverepo=live_root
+ sync_dest=os.path.join(RSYNC_LIVE,os.getenv('GBSDBBUILD_DL_POSTFIX'))
+ for subdir in self.obs_prj.split(':'):
+ if subdir != self.obs_prj.split(':')[-1]:
+ obs_liverepo=os.path.join(obs_liverepo,subdir+':')
+ sync_dest=os.path.join(sync_dest,subdir+':')
+ else:
+ obs_liverepo=os.path.join(obs_liverepo,subdir)
+ sync_dest=os.path.join(sync_dest,subdir)
+ if not os.path.exists(obs_liverepo):
+ os.mkdir(obs_liverepo)
+
+ obs_liverepo=os.path.join(obs_liverepo,self.gbsdbbuild_project_id)
+ if not os.path.exists(obs_liverepo):
+ os.mkdir(obs_liverepo)
+
+ sync_dest=os.path.join(sync_dest,self.gbsdbbuild_project_id)
+
+ buildlogs_root = os.path.join(obs_liverepo,'buildlogs')
+ os.mkdir(buildlogs_root)
+
+ sync_out_dir=obs_liverepo
+
+ profile_list=os.listdir(self.build_root+'/local/repos/')
+ for profile in profile_list:
+ build_result_repo_path=self.build_root+'/local/repos/'+profile+'/'
+ repository=profile.split('_')[-1]
+ live_repo_root=obs_liverepo+'/'+repository
+ if not os.path.exists(live_repo_root):
+ os.mkdir(live_repo_root)
+ ###########copy arch.rpm files##############
+ print '--------start copy arch.rpm files-----------------'
+ for arch in rpm_arch_type_list.split(' '):
+ if not os.path.exists(live_repo_root+'/'+arch):
+ os.mkdir(live_repo_root+'/'+arch)
+ for gbs_build_arch in os.listdir(build_result_repo_path):
+ print 'copy source file dir: %s' %(build_result_repo_path+'/'+gbs_build_arch+'/RPMS/*.'+arch+'.rpm')
+ print 'copy destination dir: %s' %(live_repo_root+'/'+arch+'/')
+ for file_name in glob.glob(build_result_repo_path+'/'+gbs_build_arch+'/RPMS/*.'+arch+'.rpm'):
+ shutil.copy(file_name, live_repo_root+'/'+arch+'/')
+ #Remove folder if there is no file in arch directory
+ if len(os.walk(live_repo_root+'/'+arch).next()[2]) == 0:
+ os.rmdir(live_repo_root+'/'+arch)
+ ###############copy src.rpm files##############
+ print '--------start copy src.rpm files-----------------'
+ for gbs_build_arch in os.listdir(build_result_repo_path):
+ if not os.path.exists(live_repo_root+'/src/'):
+ os.mkdir(live_repo_root+'/src/')
+ for file_name in glob.glob(build_result_repo_path+'/'+gbs_build_arch+'/SRPMS/*.src.rpm'):
+ shutil.copy(file_name, live_repo_root+'/src/')
+ ###############copy buildlog files##############
+ print '--------start copy buildlog files-----------------'
+ buildlogs_repository = os.path.join(buildlogs_root,repository)
+ os.mkdir(buildlogs_repository)
+ for gbs_build_arch in os.listdir(build_result_repo_path):
+ os.mkdir(os.path.join(buildlogs_repository,gbs_build_arch))
+ success_log_dest=os.path.join(buildlogs_repository,gbs_build_arch,'succeeded')
+ fail_log_dest=os.path.join(buildlogs_repository,gbs_build_arch,'failed')
+ os.mkdir(success_log_dest)
+ os.mkdir(fail_log_dest)
+ #succeeded packages
+ success_log_root=os.path.join(build_result_repo_path,gbs_build_arch,'logs/success')
+ fail_log_root=os.path.join(build_result_repo_path,gbs_build_arch,'logs/fail')
+ for success_pkg in os.listdir(success_log_root):
+ src_file=success_log_root+'/'+success_pkg+'/log.txt'
+ print 'success_log_dest: %s ,success_pkg: %s' %(success_log_dest,success_pkg)
+ dest_file=success_log_dest+'/'+re.findall('\D*\d*\D+\d*[-]',success_pkg)[0].rstrip('-')+'.buildlog.txt'
+ shutil.copy(src_file,dest_file)
+ #failed packages
+ for fail_pkg in os.listdir(fail_log_root):
+ src_file=fail_log_root+'/'+fail_pkg+'/log.txt'
+ dest_file=fail_log_dest+'/'+re.findall('\D*\d*\D+\d*[-]',fail_pkg)[0].rstrip('-')+'.buildlog.txt'
+ shutil.copy(src_file,dest_file)
+ #Remove folder if there is no file in arch directory
+ if len(os.walk(success_log_dest).next()[2]) == 0:
+ os.rmdir(success_log_dest)
+ if len(os.walk(fail_log_dest).next()[2]) == 0:
+ os.rmdir(fail_log_dest)
+
+ #Finally, rsync live folder to download server
+ print 'rsync sync_out_dir: %s, sync_dest: %s' %(sync_out_dir, sync_dest)
+ sync(sync_out_dir, sync_dest)
+
+
--- /dev/null
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (C) 2010, 2011, 2012, 2013, 2014 Intel, Inc.
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; version 2
+# of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+"""
+This code is called by jenkins jobs triggered by OBS events.
+"""
+
+import os
+import sys
+import re
+import datetime
+import ast
+
+from common.repomaker import find_files, RepoMaker, RepoMakerError
+from common.buildtrigger import trigger_info, trigger_next
+from common.buildservice import BuildService
+from common.backenddb import BackendDB
+from common.snapshot import Snapshot, SnapshotError, snapshot_project_enabled
+from common.utils import make_latest_link
+from common.send_mail import prepare_mail
+from common.gbsutils import GBSBuild
+
+class LocalError(Exception):
+ """Local error exception."""
+ pass
+
+
+def replace_ks_file_for_gbsdbbuild(kickstart,build_id):
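+    """Rewrite repo lines in a kickstart: pin the snapshot repos to priority 99 and
+    add a 'gbs_repo' entry (priority 1) pointing at the GBS build results."""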
+ gbsdbbuild_dl_postfix = os.getenv('GBSDBBUILD_DL_POSTFIX')
+ replaced_ks=''
+
+    snapshot_build_id = '_'.join(build_id.split('_')[:-1])
+
+ gbsdbbuild_project_id = build_id.split('_')[-1]
+
+ for line in kickstart.split('\n'):
+ if line.find('snapshots/tizen') != -1:
+ if line.find('base/') != -1:
+ line+=' --priority=99'
+ else:
+ repo_name=line.split('--name=')[1].split(' ')[0]
+ orig_line=line.replace('/@BUILD_ID@/repos/','/'+snapshot_build_id+'/repos/')+' --priority=99\n'
+ add_line=line.replace(repo_name,'gbs_repo').replace('/snapshots/tizen/','/'+gbsdbbuild_dl_postfix+'/tizen/').replace('/@BUILD_ID@/repos/','/'+snapshot_build_id+'/'+build_id+'/repos/')+' --priority=1'
+ line=orig_line + add_line
+ replaced_ks+=line+'\n'
+
+ return replaced_ks
+
+
+def prepare_trigger_data(images, build_id, path_repo, project,
+ url_pub_base):
+ """
+ prepare_trigger_data:
+ Prepare the trigger data
+ Args:
+ images_ks (truple list): [(ks_file_name, ks_file_content),]
+ build_id (str): the prerelease repo build_id
+ """
+ download_host = os.getenv('DOWNLOAD_HOST')
+ trigger_data = {} #trigger job_imager
+ trigger_snapdiff = {}
+
+ print 'prepare_trigger_data, images: %s' %images
+ for repo_name in images:
+ trigger_data[repo_name] = []
+ trigger_snapdiff[repo_name] = []
+ for ksname, kickstart in images[repo_name].ksi.items():
+ print 'ksname: %s\nkickstart:%s' %(ksname,kickstart)
+            #temporary
+# if ksname.find('mobile-wayland-armv7l-tm1') == -1 and ksname.find('iot-headless-2parts-armv7l-rpi3') == -1:
+# if ksname.find('iot-headless-2parts-armv7l-rpi3') == -1:
+# continue
+            #end of temporary
+ name = ksname.replace('.ks', '')
+
+ kickstart = replace_ks_file_for_gbsdbbuild(kickstart,build_id)
+
+ if download_host:
+ # replace host name
+ kickstart = re.sub('^(repo .*--baseurl=[^:]+://)[^/]+(/.*)',
+ '\\1%s\\2' % download_host, kickstart,
+ count=1, flags=re.MULTILINE)
+ data = {'name': name,
+ 'kickstart': kickstart,
+ 'buildid': build_id,
+ 'images_path': os.path.join("images", repo_name, name),
+ 'project': project,
+ 'repo': repo_name,
+ 'repo_path': path_repo,
+ 'url_pub_base': url_pub_base
+ }
+
+ trigger_data[repo_name].append(data)
+ data_snapdiff = data.copy()
+ data_snapdiff.pop('kickstart')
+ trigger_snapdiff[repo_name].append(data_snapdiff)
+
+ return trigger_data, trigger_snapdiff
+
+
+def trigger_image_creation(trigger_data):
+ """Trigger the image_creation jobs"""
+ count = 0
+ for repo in trigger_data.keys():
+ for index, data in enumerate(trigger_data[repo]):
+ trigger_next('gbsfullbuild_image_trigger_%s_%s' % (repo, index), data)
+ count += 1
+ # Request number of imager nodes
+ if os.getenv("ONDEMAND_SLAVE_CONFIGURATION_ENABLED", "0") == "1":
+ if count > 0:
+ trigger_next("SLAVE_BUILDER", {"data":"dummy"}, \
+ extra_params={"ACTION": "REQUEST_WORKER", \
+ "PURPOSE": "JENKINS_IMAGER", \
+ "REQUESTED_NUM_EXECUTORS": "%d" % count})
+
+
+def create_local_targets(gbsbuild_dic):
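+    """Build the target list ({'Name': repo, 'Architectures': [...]}) from
+    gbsbuild_dic['arch_list'], mapping i*86 architectures to 'ia32'."""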
+ local_targets = []
+ for repository in gbsbuild_dic['arch_list']:
+ local_targets.append({'Name':repository})
+ local_targets[-1]['Architectures'] = []
+ tmp_arch_list = gbsbuild_dic['arch_list'][repository]
+ for arch in tmp_arch_list:
+ if arch.startswith('i') and arch.endswith('86'):
+ local_targets[-1]['Architectures'].append('ia32')
+ else:
+ local_targets[-1]['Architectures'].append(arch)
+
+ return local_targets
+
+
+def make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic):
+ """
+ make repo.
+
+ Args:
+ project (str): OBS prerelease project name
+ repo (str): name of the OBS live repository
+ backenddb (BackendDB): backenddb instance
+ base_path (str): path to the location of snapshot
+ live_repo_base (str): path to live repo
+ Raises:
+ LocalError if can't create repos or can't find image configurations
+ """
+
+ try:
+# snapshot = Snapshot(backenddb, base_path, obs_project=project)
+
+ # Increment snapshot
+# snapshot.inc()
+
+ # Store variables into local rather than accessing Snapshot() instance.
+# local_build_id = snapshot.build_id
+# local_targets = snapshot.targets
+# local_path = snapshot.path
+# local_dir = snapshot.dir
+ local_build_id = gbsbuild_dic['snapshot_build_id']+'_'+gbsbuild_dic['gbsdbbuild_project_id']
+ local_targets = create_local_targets(gbsbuild_dic)
+ local_dir = os.path.join(os.getenv('GBSDBBUILD_DL_POSTFIX'),'tizen',gbsbuild_dic['t_ver']+gbsbuild_dic['profile'])
+ local_path = os.path.join(base_path,local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id)
+
+ # Delete Snapshot() instance.
+# del snapshot
+
+ print 'We are working on:'
+ print '\t%s\n\t%s\n\t%s\n\t%s\n\n' % (local_build_id, local_targets, local_path, local_dir)
+ sys.stdout.flush()
+ except SnapshotError, err:
+ raise LocalError("Error getting snapshot info: %s" % str(err))
+
+ targets = local_targets
+ live_repo_path = os.path.join(live_repo_base,
+ project.replace(':', ':/'),gbsbuild_dic['gbsdbbuild_project_id'])
+ parent_snapshot_path = os.path.join(base_path,'snapshots/tizen',
+ gbsbuild_dic['t_ver']+gbsbuild_dic['profile'],
+ gbsbuild_dic['snapshot_build_id'])
+ repos = {}
+ imagedatas = {}
+
+ # Convert live repo to download structure
+ for repo in targets:
+ repomaker = RepoMaker(local_build_id, local_path)
+
+ try:
+ repomaker.add_repo(live_repo_path, repo['Name'],
+ repo['Architectures'],move=False)
+ except RepoMakerError, err:
+ #raise LocalError("Unable to create download repo: %s" % err)
+ print "Unable to create download repo: %s" % repr(err)
+ repos.update({repo['Name']: {'archs': list(set(repo['Architectures']))}})
+ continue
+
+ # Assuming that there can be just one image-configurations-
+ # rpm in the repo
+ if not repomaker.has_images():
+ # repomaker did not found image-configurations in pre_release repo,
+ # let's take it from target repo, only one package repo is enough
+
+ # Add image configuration to prerelease repo
+ img_conf = find_files(os.path.join(parent_snapshot_path, 'repos',
+ repo['Name']),
+ prefix="image-configurations-",
+ suffix='noarch.rpm')
+ print 'parent_snapshot_path: %s' %parent_snapshot_path
+ print "image-conf: %s" %img_conf
+ img_conf_list = list(img_conf)
+ # whether exist package of image-configuration
+ if not img_conf_list:
+ #raise LocalError("Image configuration not found in %s" %
+ # snapshot.path)
+ print "Image configuration not found in %s, repo:%s" %(snapshot_path, repo['Name'])
+ continue
+
+ for rpm in img_conf_list:
+ repomaker.load_imagedata(rpm)
+
+ # whether exist ks poin to the repo
+ if not repomaker.has_images():
+ continue
+
+ repos.update(repomaker.repos)
+ imagedatas[repo['Name']] = repomaker.imagedata
+
+ # Generate image info to builddata/ dir
+ repomaker.gen_image_info()
+
+ # Generate repo manifest
+ manifest_items = repomaker.gen_manifest_info(repo['Name'],
+ os.getenv('GERRIT_FETCH_URL'),
+ os.getenv('GERRIT_REVIEW_URL'))
+ repomaker.gen_manifest_info_app_from_rpm(repo['Name'],
+ os.getenv('GERRIT_FETCH_URL'),
+ os.getenv('GERRIT_REVIEW_URL'),
+ live_repo_path, repo['Architectures'])
+ # Check duplicated items
+# path_list = [ t.keys()[0] for t in manifest_items if t.keys()[0] ]
+# duplicated = set([x for x in path_list if path_list.count(x) > 1])
+# if len(duplicated) >= 1:
+# prepare_mail('error_report_manifest.env', \
+# 'Snapshot %s have duplicated packages' % (local_build_id), \
+# '\nDuplicated items: %s' % duplicated, \
+# os.getenv('NOREPLY_EMAIL_SENDER'), \
+# os.getenv('MAILINGLIST_SYSTEM').split(','))
+
+    print 'imagedatas:\n%s' %imagedatas
+ return {'project': project,
+ 'repo': repos,
+ 'repo_path': os.path.join(local_dir,gbsbuild_dic['snapshot_build_id'],local_build_id),
+ 'build_id': local_build_id,
+ 'imagedata': imagedatas
+ }
+
+def main():
+ """Script entry point."""
+
+    print '---[JOB STARTED]-------------------------'
+ global buildmonitor_enabled
+ buildmonitor_enabled = os.getenv("BUILDMONITOR_ENABLED", "0") != "0"
+ print 'buildmonitor_enabled(%s)\n' % (buildmonitor_enabled)
+ if buildmonitor_enabled:
+ global bm_start_datetime
+ bm_start_datetime = datetime.datetime.now()
+
+ base_url = os.getenv("URL_PUBLIC_REPO_BASE")
+ base_path = os.getenv('PATH_REPO_BASE')
+ live_repo_base = os.path.join(os.getenv('PATH_LIVE_REPO_BASE'),os.getenv('GBSDBBUILD_DL_POSTFIX'))
+
+ fields = trigger_info(os.getenv("TRIGGER_INFO"))
+ gbsbuild_dic = fields['gbsbuild_dic']
+ snapshot_name = gbsbuild_dic['snapshot_build_id']
+ project = gbsbuild_dic['obs_prj']
+
+ # Init backend database
+ redis_host = os.getenv("REDIS_HOST")
+ redis_port = int(os.getenv("REDIS_PORT"))
+ backenddb = BackendDB(redis_host, redis_port)
+
+ repo_data = make_repo(project, backenddb, base_path, live_repo_base, gbsbuild_dic)
+
+ # prepare trigger data for image creation jobs and snapdiff sync jobs
+ trigger_data, trigger_snapdiff = prepare_trigger_data(
+ repo_data['imagedata'],
+ repo_data['build_id'],
+ repo_data['repo_path'],
+ project, base_url)
+
+ # trigger image creation jobs
+ trigger_image_creation(trigger_data)
+
+ # trigger post snapshot creation job with repo data
+ data = repo_data.copy()
+ # remove unused item
+ data.pop('imagedata')
+ trigger_next("gbsfullbuild-buildlogs", data)
+
+ # TRIGGER NEXT BUILD-MONITOR-Success
+# update_message="Succeeded"
+# if len(update_message) < 119:
+# trigger_next("BUILD-MONITOR-Success", \
+# {'bm_stage':'GBSFULLBUILD_SNAPSHOT',
+# 'snapshot_name':snapshot_name,
+# 'gbsfullbuild_string': update_message})
+
+ # TRIGGER NEXT BUILD-MONITOR-Fail
+# update_message="Image Creation Failed"
+# if len(update_message) < 119:
+# trigger_next("BUILD-MONITOR-Failed", \
+# {'bm_stage':'GBSFULLBUILD_SNAPSHOT',
+# 'snapshot_name':snapshot_name,
+# 'gbsfullbuild_string': update_message})
+
+
+if __name__ == '__main__':
+ try:
+ sys.exit(main())
+ except LocalError, error:
+ print error
+ sys.exit(1)
+