--- /dev/null
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+# for more details.
+#
+import os
+import re
+import sys
+import json
+import requests
+import ast
+from time import sleep
+from datetime import datetime
+
+from common.buildservice import BuildService
+from common.buildtrigger import trigger_info, trigger_next
+from gitbuildsys.errors import ObsError
+import xml.etree.cElementTree as ElementTree
+from common.mapping import get_ref_map
+from common.gerrit import GerritEnv
+
+class LocalError(Exception):
+ """Local error exception."""
+ pass
+
+class ref_create_project_obs(object):
+ """ create reference project to obs """
+    profiles = ast.literal_eval(os.getenv("REF_ACTIVE_PRJS", "[]"))
+    pattern = r'tizen[0-9a-zA-Z_\-\.]*[0-9]{8}\.[0-9]{1,2}'
+
+ def setup_profile(self, project, repo_path=None):
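+        """ select the profile entry matching project and attach snapshot url/credentials """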
+
+ # set up profile
+ for l in self.profiles:
+ if project == l['project']:
+ self.profile = l
+ self.profile['snapshot_url'] = os.path.dirname(os.path.join(os.getenv("URL_PUBLIC_REPO_BASE"),
+ repo_path))
+ self.profile['snapshot_username'] = os.getenv("REF_SNAPSHOT_USERNAME",'')
+ self.profile['snapshot_password'] = os.getenv("REF_SNAPSHOT_PASSWORD",'')
+ print 'project = %s' % (project)
+ return self.profile
+ return False
+
+    def remove_duplicates(self, t, s):
+        """ return the items of t that are not in s """
+        return list(set(t) - set(s))
+
+ def get_manifest_filelists_snapshot(self, profile, request_url, timeout=5, group=None):
+ """ get manifest filelists from snapshot"""
+ p = re.compile(r'alt=\"\[(TXT|DIR| )]*\]\".*<a href=\"(.*)\">')
+
+ if not request_url:
+ return []
+ print request_url
+ # get data from url
+ for loop in range(10):
+ try:
+ f = requests.get(request_url,
+ auth=(profile['snapshot_username'],
+ profile['snapshot_password']),
+ timeout=timeout)
+ if f.status_code == 200:
+ break
+ except requests.exceptions.Timeout as e:
+ print(e)
+ continue
+ except requests.exceptions.ConnectionError as e:
+ print(e)
+ continue
+ except Exception as e:
+ print(e)
+ raise Exception('exception from get manifest filelists')
+ else:
+ raise Exception('can\'t get manifest filelists')
+
+        # Parse every manifest XML linked from the index and map each
+        # project's git path to its revision; paths from _preloadapp.xml
+        # are collected separately for the 'abs' group.
+ results = {}
+ exclude_pkgs = []
+ found_links = p.findall(f.text)
+ for link in found_links:
+ manifest_url = os.path.join(request_url, link[1])
+ if link[0] == 'TXT':
+ f = requests.get(manifest_url,
+ auth=(profile['snapshot_username'],
+ profile['snapshot_password']),
+ timeout=timeout)
+ try:
+ tree = ElementTree.fromstring(f.text)
+            except ElementTree.ParseError:
+                raise  # re-raise with the original traceback
+ for result in tree.findall('project'):
+ if '_preloadapp.xml' in link[1]:
+ exclude_pkgs.append(''.join(result.get('path')))
+ else:
+ results[''.join(result.get('path'))] = result.get('revision')
+
+ if group == 'abs':
+ preloadapp_pkgs = {}
+ for app in exclude_pkgs:
+ preloadapp_pkgs[app] = results[app]
+ return preloadapp_pkgs
+
+ #print results
+ return results
+
+
+ def get_snapshot_version(self, profile, timeout=5):
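+        """ return the latest (and previous, if present) snapshot build ids """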
+ p = re.compile(self.pattern)
+
+ if not profile['snapshot_url']:
+ return []
+
+ # get data from url
+ for loop in range(10):
+ try:
+ f = requests.get(profile['snapshot_url'],
+ auth=(profile['snapshot_username'],
+ profile['snapshot_password']),
+ timeout=timeout)
+ if f.status_code == 200:
+ break
+ except requests.exceptions.Timeout as e:
+ print(e)
+ continue
+ except requests.exceptions.ConnectionError as e:
+ print(e)
+ continue
+ except Exception as e:
+ print(e)
+ raise Exception('exception from get_latest_snapshot_version')
+ else:
+ raise Exception('can\'t get latest snapshot version')
+
+ # return snapshot version
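+        # Each build id appears twice in the index markup (href and link
+        # text), so results[-1] is the newest and results[-3] the previous one.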
+ results = [str(s) for s in p.findall(f.text)]
+ #print results
+ if len(results) > 2:
+ return [results[-1], results[-3]]
+ else:
+ return [results[-1]]
+
+ def create_project(self, build, target, info, meta, config, baserefproject=None):
+ """
+ create project
+ """
+
+ try:
+ if not build.exists(target):
+ try:
+ build.create_project(target, None, description=json.dumps(info))
+                except ObsError as error:
+ raise LocalError("Unable to create project %s: %s" % (target, error))
+
+ # set meta
+ xml_meta = ElementTree.fromstringlist(meta)
+ #change the target project name
+ xml_meta.set('name',target)
+ #delete remote person
+ for person_element in xml_meta.findall('person'):
+ xml_meta.remove(person_element)
+ #delete link project
+ for link in xml_meta.findall('link'):
+ xml_meta.remove(link)
+
+ # replace
+ if baserefproject:
+ for repo_element in xml_meta.findall('repository'):
+ for element in repo_element.findall('path'):
+ element.set('project',baserefproject)
+
+ #print ElementTree.tostring(xml_meta)
+ #add target person
+ element = ElementTree.Element('person', {"userid": "%s" % (os.getenv("GERRIT_USERNAME")),"role": "maintainer"})
+ xml_meta.append(element)
+ print ElementTree.tostring(xml_meta)
+ build.set_meta(ElementTree.tostring(xml_meta), target)
+ # set project config
+ #print config
+ build.set_project_config(target, config)
+
+ #disable publish flag
+ build.disable_build_flag(target, repo = None, flag="publish", status="disable")
+ #disable build flag
+ build.disable_build_flag(target, repo = None, flag="build", status="disable")
+ #disable debuginfo flag
+ build.disable_build_flag(target, repo = None, flag="debuginfo", status="disable")
+ #disable useforbuild flag
+ build.disable_build_flag(target, repo = None, flag="useforbuild", status="disable")
+ print "\nTarget project %s created" %(target)
+ return True
+ else:
+ print "\nTarget project %s exist" %(target)
+ return False
+        except ObsError as error:
+ raise LocalError("Unable to create project %s: %s" % (target, error))
+
+ def list_packages_from_remote(self, remote_build, build, profile, target, packages=None, existing=False):
+ """
+ "<services><service name='gbs'>" \
+ "<param name='revision'>%s</param>" \
+ "<param name='url'>%s</param>" \
+ "<param name='git-meta'>_git-meta</param>" \
+ "<param name='error-pkg'>2</param>" \
+ "</service></services>"
+ """
+ ret_dict = {}
+
+ sourceinfo = remote_build.get_sourceinfo_list(profile)
+ upload_packages=""
+ if packages:
+ upload_packages = packages
+ else:
+ upload_packages = [ package for package in sourceinfo if not re.search("_aggregate", package) ]
+ for package in upload_packages:
+ if sourceinfo[package]:
+ print " [sourceinfo(%s)] %s" % (package, sourceinfo[package])
+ link_prj, link_pkg = sourceinfo[package][-1].split('/')
+ if link_prj and link_pkg :
+ continue
+ elif re.search("_aggregate", package):
+ print " [_aggregate] %s/%s" % (profile, package)
+ aggregate_file_name="_aggregate"
+ remote_build.get_source_file(profile, package, aggregate_file_name)
+ content = ""
+ with open(aggregate_file_name, 'r') as f:
+ content = f.read()
+
+ if not re.search("qemu_aggregate", package):
+ content_xml_root = ElementTree.fromstringlist(content)
+ for element in content_xml_root.findall('aggregate'):
+ element.set('project',target)
+ content = ElementTree.tostring(content_xml_root)
+
+ print " [_aggregate] O %s" % (package)
+
+ meta = remote_build.get_meta(profile, package)
+ xml_meta = ElementTree.fromstringlist(meta)
+ xml_meta.set('project',target)
+ meta = ElementTree.tostring(xml_meta)
+ print " [_aggregate] META %s: %s" % (package, meta)
+
+ ret_dict[package] = {'type': aggregate_file_name, \
+ 'content': content, \
+ 'meta': meta}
+ else:
+ service_file_name = "_service"
+ remote_build.get_source_file(profile, package, service_file_name)
+ content = ""
+ with open(service_file_name, 'r') as f:
+ content = f.read()
+
+ if content:
+ _git = ''
+ _rev = ''
+ _root = ElementTree.fromstringlist(content)
+ for elem in _root.findall('service'):
+ for param in elem.findall('param'):
+ if param.get('name') == 'url':
+ _url = param.text
+ if len(_url.split(os.getenv('GIT_CACHE_DIR')+'/')) == 2:
+ _git = _url.split(os.getenv('GIT_CACHE_DIR')+'/')[1]
+                            elif len(_url.split(os.getenv('GERRIT_FETCH_URL')+'/')) == 2:
+                                _git = _url.split(os.getenv('GERRIT_FETCH_URL')+'/')[1]
+ if param.get('name') == 'revision':
+ _rev = param.text
+
+ print " [_service] O %s (%s,%s)" % (package, _git, _rev)
+
+ ret_dict[package] = {'type': service_file_name, \
+ 'content': content, \
+ 'meta': None, \
+ 'git': _git, \
+ 'rev': _rev, \
+ 'exist': existing}
+ else:
+ print " [_service] X %s" % (package)
+
+ return ret_dict
+
+ def _update_packages(self, remote_build, build, profile, target, upload_packages):
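+        """ create each package in target and commit its _service/_aggregate file """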
+        for package in upload_packages:
+ _update_type = upload_packages[package]['type']
+ raw = upload_packages[package]['content']
+ with open(_update_type, 'w') as f:
+ f.write(raw)
+ #if not build.exists(target, package):
+ if True:
+ try:
+ build.create_package(target, package)
+                except ObsError as error:
+ #raise UploadError('[%s] Unable to create package %s/%s :%s' \
+ print '[%s] Unable to create package %s/%s :%s' \
+ % (_update_type, target, package, error)
+ commit_msg="uploaded to package %s/%s from %s" % (target, package, profile)
+ try:
+ build.commit_files(target, package,
+ [(_update_type, True)], commit_msg)
+            except ObsError as error:
+                raise LocalError("Unable to upload %s to %s: %s" % \
+                        (_update_type, target, error))
+
+ sleep(0.5) # Wait 0.5 seconds...
+
+ if _update_type == '_aggregate':
+ build.set_meta(upload_packages[package]['meta'], target, package)
+
+ def update_packages(self, remote_build, build, profile, target, upload_packages):
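+        """ upload packages to target, pushing known time-consuming packages first """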
+ print ' Total packages to check... %d' % len(upload_packages)
+ plain_packages = upload_packages.copy()
+
+ # Move time-consuming packages to front
+ power_packages = {}
+ if os.getenv("REF_UPLOAD_BIG_PKG_LIST"):
+ for x in os.getenv("REF_UPLOAD_BIG_PKG_LIST").split(','):
+ if x in upload_packages:
+ power_packages[x] = upload_packages[x]
+ del plain_packages[x]
+ if power_packages:
+ self._update_packages(remote_build, build, profile, target, power_packages)
+        if plain_packages:
+ self._update_packages(remote_build, build, profile, target, plain_packages)
+
+ def create_related_packages(self, remote_build, build, profile, target):
+ """create the 'link' package that relate the original package
+ profile is the base project
+ target is the target project
+ """
+ sourceinfo = remote_build.get_sourceinfo_list(profile)
+ for package in sourceinfo:
+ if sourceinfo[package]:
+ link_prj, link_pkg = sourceinfo[package][-1].split('/')
+ if link_prj and link_pkg:
+ if build.exists(target, package):
+ build.delete_package(target, package)
+ build.create_link_pac(target, link_pkg, \
+ target, package)
+ print ' [_link] %s/%s -> %s/%s' % (target, link_pkg, target, package)
+
+ def compare_with_manifest(self, todo_dict, manifest_packages):
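+        """ sync todo_dict revisions with the snapshot manifest and drop unchanged packages """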
+ #TODO: If we have changed git path???
+ #TODO: If manifest items are not proceeded???
+ for item in todo_dict:
+ if 'git' in todo_dict[item] and todo_dict[item]['git'] \
+ and todo_dict[item]['git'] in manifest_packages:
+ rev_my = todo_dict[item]['rev']
+ rev_snapshot = manifest_packages[todo_dict[item]['git']]
+ if rev_my != rev_snapshot:
+ print ' >> DIFFER (%s) -> (%s) %s' % (rev_my, rev_snapshot, item)
+ todo_dict[item]['rev'] = rev_snapshot
+ todo_dict[item]['content'] = todo_dict[item]['content'].replace(rev_my, rev_snapshot)
+ todo_dict[item]['exist'] = False
+
+        # Drop packages that already exist and need no update
+        for k, v in todo_dict.items():
+            if v.get('exist'):
+                del todo_dict[k]
+ return todo_dict
+
+ def run_profile_update(self, build, this_project, target):
+ """ run update profile project """
+ if build.exists(this_project):
+ try:
+ build.unlink_project(this_project)
+            except Exception as err:
+                print 'Your project %s is broken: %s. Re-create it!' % (this_project, err)
+                build.cleanup(this_project, "Create new.")
+                sleep(10) # Wait 10 seconds...
+ build.create_project(this_project, target)
+ else:
+ build.create_project(this_project, target)
+
+ build.disable_build_flag(this_project, repo = None, flag='build', status='disable')
+ build.disable_build_flag(this_project, repo = None, flag='publish', status='disable')
+ build.disable_build_flag(this_project, repo = None, flag="debuginfo", status="disable")
+ build.disable_build_flag(this_project, repo = None, flag="useforbuild", status="disable")
+
+ #build.link_project(this_project, src=target, linktype=None)
+
+ target_meta = build.get_meta(target)
+ print 'OK we got target_meta...\n%s' % target_meta
+ target_xml_meta = ElementTree.fromstringlist(target_meta)
+ target_xml_meta.attrib['name'] = this_project
+ target_xml_meta.find('title').text = 'Reference from %s' % target
+ for repo_element in target_xml_meta.findall('repository'):
+ for element in repo_element.findall('path'):
+ repo_element.remove(element)
+ #element.set('project', target)
+ #element.set('repository', repo_element.get('name'))
+ repo_element.insert(-1, ElementTree.Element('path', project=target, repository=repo_element.get('name')))
+
+ print 'set meta started...\n%s' % ElementTree.tostring(target_xml_meta)
+ build.set_meta(ElementTree.tostring(target_xml_meta), this_project)
+
+ build.link_project(this_project, src=target, linktype=None)
+
+ print 'Profile %s updated to %s' % (this_project, target)
+
+ def run_ref_create_project_obs(self):
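+        """ mirror the remote profile project into <profile>:ref:<buildid>
+
+        TRIGGER_INFO must carry 'repo_path', 'build_id' and 'project';
+        build_id is expected to look like '<profile>_<YYYYMMDD.N>'.
+        """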
+
+ print '---[JOB STARTED]-------------------------'
+
+ obs_api = os.getenv("REF_TARGET_OBS_API_URL")
+ obs_user = os.getenv("REF_TARGET_OBS_API_USERNAME")
+ obs_passwd = os.getenv("REF_TARGET_OBS_API_PASSWD")
+
+ remote_obs_api = os.getenv("REF_REMOTE_OBS_API_URL")
+ remote_obs_user = os.getenv("REF_REMOTE_OBS_API_USERNAME")
+ remote_obs_passwd = os.getenv("REF_REMOTE_OBS_API_PASSWD")
+
+ # default value is null
+ if os.getenv("REF_GERRIT_NAME"):
+ gerrit_env = GerritEnv(os.getenv("REF_GERRIT_NAME"))
+ else:
+ gerrit_env = GerritEnv("")
+
+ remote_meta = ''
+ config = ''
+ remote_package_list = ''
+
+ todo_dict = {}
+
+ # precheck profile
+ fields = ''
+ fields = trigger_info(os.getenv('TRIGGER_INFO'))
+
+ # Check if we've got required field in TRIGGER_INFO
+ for field in ('repo_path', 'build_id', 'project'):
+ if field not in fields:
+ print 'Error: TRIGGER_INFO doesn\'t contain %s' % field
+ return -1
+
+ self.profile = self.setup_profile(fields['project'], fields['repo_path'])
+ if not self.profile:
+ print 'Skip Sync OBS project %s' % fields['project']
+ return 0
+
+ print '\nJOB Started at %s' % (str(datetime.now()))
+
+ src = profile = self.profile['project']
+ baseproject = self.profile['baseproject']
+ build_id = fields['build_id']
+ buildid = fields['build_id'].split('_')[1]
+ snapshotdir = fields['repo_path']
+ #print "buildid %s" %(buildid)
+
+ refname = "%s:ref:" % (profile)
+ target = "%s%s" %(refname,buildid)
+
+ #### remote buildservice ####
+ remote_build = BuildService(remote_obs_api, remote_obs_user, remote_obs_passwd)
+ #### target buildservice ####
+ build = BuildService(obs_api, obs_user, obs_passwd, \
+ remote_obs_api, remote_obs_user, remote_obs_passwd)
+
+ #### Update description for ref project ####
+ if os.getenv('TRIGGER_MISC') == 'importrpmdone':
+ return self.run_profile_update(build, self.profile['project'], target)
+
+ #get project list
+ projectlist = [ prj for prj in build.get_package_list("") if prj.split(':')[0] == "Tizen" ]
+ #print "\n********\n Project List=%s" % projectlist
+
+ refprojectlist = [ obj for obj in projectlist if refname in obj ]
+ #exclude target project in refprojectlist
+ if target in refprojectlist:
+ refprojectlist.remove(target)
+ #print "\n Ref Project List=%s" % refprojectlist
+
+ # get base ref project
+ # get list of ref projects from git-ref-mapping
+ obs_ref_prjs = get_ref_map(baseproject, None, \
+ gitcache=gerrit_env.gitcache, \
+ gerrit_hostname=gerrit_env.hostname, \
+ gerrit_username=gerrit_env.username, \
+ gerrit_sshport=gerrit_env.sshport
+ )
+ print 'baseproject:%s' % baseproject
+ print 'obs_ref_prjs:%s' % obs_ref_prjs
+ base_mapping_list = [ x['OBS_package'] for x in obs_ref_prjs if x['OBS_project'] == baseproject ]
+ print 'base_mapping_list:%s' % base_mapping_list
+ baserefproject = ''
+ if base_mapping_list:
+ baserefproject = base_mapping_list[0]
+ print "\n Ref Base Project from mapping=[%s]" % baserefproject
+ else:
+ if baseproject:
+                basereflists = [ obj for obj in projectlist \
+                                 if "%s:ref:" % (baseproject) in obj ]
+                if basereflists:
+                    baserefproject = basereflists[-1]
+ print "\n Ref Base Project from latest=[%s]" % baserefproject
+ print "\n********"
+
+ if not refprojectlist:
+ src = profile
+ build_src = remote_build
+ else:
+            src = refprojectlist[-1]
+ build_src = build
+ print "src = %s , target = %s" %(src, target)
+
+ print " 0) Get meta, config, package list from remote [ %s ]" % (profile)
+ remote_meta = remote_build.get_meta(profile)
+ config = remote_build.get_project_config(profile)
+ remote_package_list = [ p for p in remote_build.get_sourceinfo_list(profile) ]
+ print '\nGet Package List from Remote Done at %s' % (str(datetime.now()))
+
+ print "\n 0-1) copy package list (%d):\n %s" %(len(remote_package_list), remote_package_list)
+ if not build.exists(target):
+ print "\n 0-2) create project: %s" %(target)
+ self.create_project(build, target, None, remote_meta, config, baserefproject)
+ todo_dict = self.list_packages_from_remote(build_src, build, src, target)
+ else:
+ print "\n 0-2) project already exists: %s" %(target)
+ todo_dict = self.list_packages_from_remote(build_src, build, target, target, existing=True)
+
+ print '\nListing from Remote Done at %s' % (str(datetime.now()))
+ print 'todo_dict(%d):' % (len(todo_dict))
+
+ if True:
+ package_list = [ x for x in todo_dict ]
+ print "\n********"
+ print " 1) package list of target project \n %s" %(package_list)
+
+ packages = self.remove_duplicates(package_list, remote_package_list)
+ print "\n********"
+ print " 2) remove package %s" %(packages)
+ for pkgname in packages:
+ del todo_dict[pkgname]
+ #build.delete_package(target, pkgname)
+
+ packages = self.remove_duplicates(remote_package_list, package_list)
+ print "\n********"
+ print " 3) add packages %s" %(packages)
+ if packages:
+ ret_dict_add = self.list_packages_from_remote(\
+ remote_build, build, profile, target, packages=packages)
+ if ret_dict_add:
+ todo_dict.update(ret_dict_add)
+        print '\nAdd/Remove Done at %s' % (str(datetime.now()))
+
+ print "\n********"
+ print " 4) compare package project "
+ manifest_packages = self.get_manifest_filelists_snapshot(self.profile, \
+ os.path.join(os.getenv("URL_PUBLIC_REPO_BASE"), \
+ snapshotdir,\
+ "builddata/manifest"))
+ todo_dict = self.compare_with_manifest(todo_dict, manifest_packages)
+ print '\nCompare With Manifest Done at %s' % (str(datetime.now()))
+
+ print '\n 4-1) Final packages to be updated %d' % len(todo_dict)
+ sys.stdout.flush()
+ self.update_packages(remote_build, build, profile, target, todo_dict)
+ print '\nUpdate Packages Done at %s' % (str(datetime.now()))
+
+ print "\n********"
+ print " 5) Precheck all error package list from project"
+
+ need_runservice = []
+ for _wait in range(0,30):
+ sys.stdout.flush()
+ sleep(10) # Wait 10 seconds...
+ viewinfofile = build.get_source_viewinfo(target, nofilename=0)
+ root = ElementTree.parse(viewinfofile).getroot()
+            errpackages = [ s.get('package') for s in root.iter('sourceinfo') if s.findall('error') ]
+ for x in root.iter('sourceinfo'):
+ for y in x.findall('filename'):
+ if '_service:gbs:service-error.spec' in y.text:
+ errpackages.append(x.get('package'))
+ break
+ for y in x.findall('error'):
+ print x.get('package'),y.text
+ if 'bad build configuration, no build type' in y.text:
+ errpackages.remove(x.get('package'))
+ print errpackages
+ if errpackages:
+ print ' 5-1) Under packages are still... (%d)\n %s' % (len(errpackages), errpackages)
+ # Retrigger git sync
+ for item in errpackages:
+ pkgview = ElementTree.fromstring(build.get_source_info(target, item))
+ for sv in pkgview.findall('serviceinfo'):
+ if sv.get('code') != 'failed': continue
+ for er in sv.findall('error'):
+ print ' %s %s with cause: (%s)' % (item, sv.get('code'), er.text)
+ need_runservice.append(item)
+ sys.stdout.flush()
+ sleep(30) # Wait 30 seconds...
+ for item in need_runservice:
+ print ' runservice for %s' % item
+ build.runservice(target, item)
+ need_runservice = []
+ else:
+ print ' 5-2) All packages imported.'
+ break
+
+ print "\n********"
+ print " 6) create related packages \n"
+ self.create_related_packages(remote_build, build, profile, target)
+
+ print "\n********"
+ print " 7) Sync Done..."
+
+ # importrpm trigger next
+ fields['profile'] = profile
+ fields['target'] = target
+ trigger_next("REF_IMPORT_RPM_OBS", fields)
+
+ if errpackages:
+ return 1
+ return 0
+
+ def main(self):
+
+ print('-----[JOB STARTED: ref_create_project_obs ]-----')
+        try:
+            return self.run_ref_create_project_obs()
+        except Exception as err:
+            print 'run_ref_create_project_obs operation failed'
+            print err
+            raise LocalError("FAIL %s" % (err))
+
+if __name__ == '__main__':
+
+ try:
+ trigger = ref_create_project_obs()
+ sys.exit(trigger.main())
+ except Exception as e:
+ print(e)
+ sys.exit(1)
+
--- /dev/null
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2014, 2015, 2016 Samsung Electronics.Co.Ltd.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+# for more details.
+#
+import os
+import re
+import sys
+import subprocess
+import gzip
+import shutil
+from datetime import datetime
+from common.buildtrigger import trigger_info
+from common.utils import xml_to_obj, execute_shell
+
+class LocalError(Exception):
+ """Local error exception."""
+ pass
+
+class ref_import_rpm_obs(object):
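+    """ import prebuilt rpms from a snapshot into the reference OBS project """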
+
+ profile = None
+ profile_basedir = None
+ profile_repos = {}
+
+ def setup_profile(self, name, project):
+
+ # set up profile
+ self.profile = {'name': name, \
+ 'refprj': project, \
+ 'dest_dir': '/srv/obs/build', \
+ 'option': 1}
+
+ #prepare dir
+ basedir = os.path.join(self.profile['dest_dir'],self.profile['refprj'])
+        if os.path.exists(basedir):
+            #shutil.rmtree(basedir)
+            print "%s exists" % (basedir)
+        else:
+            print "%s does not exist" % (basedir)
+            return False
+ self.profile_basedir = basedir
+ #os.mkdirs(basedir)
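+        # repo_dirs['repo'] lists the repository names found on disk;
+        # repo_dirs[<repo>] lists (arch, relative path) tuples for that repo.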
+ repo_dirs = {}
+        repo_dirs['repo'] = [ repo for repo in os.listdir(basedir) if os.path.isdir(os.path.join(basedir, repo)) ]
+ for repo_dir in repo_dirs['repo']:
+ repo_dirs[repo_dir] = [ (arch_dir, os.path.join(repo_dir, arch_dir)) \
+ for arch_dir in os.listdir(os.path.join(basedir,repo_dir)) \
+ if os.path.isdir(os.path.join(basedir,repo_dir, arch_dir)) ]
+
+ self.profile_repos = repo_dirs
+ for repo in self.profile_repos['repo']:
+ for arch , path in self.profile_repos[repo]:
+ print 'repo = %s , arch = %s, path = %s' %(repo, arch, path)
+
+ #self.profile['repoarch']
+ print 'project = %s' % (project)
+ sys.stdout.flush()
+
+ return self.profile
+
+ def copy_rsync_rpm(self, repospath, archfilelists, dstdir):
+ """
+ """
+ file_name="/srv/obs/build/_filelist"
+ content = ""
+
+ if os.path.isfile(file_name):
+ os.remove(file_name)
+
+ with open(file_name, 'w') as f:
+ for filepath in archfilelists:
+ f.write(os.path.join(repospath,filepath)+'\n')
+ cmd = "cat %s | " %(file_name)
+ cmd += "xargs -n 1 -P 8 -I% rsync -avz --bwlimit=5120000 % "
+ cmd += "%s/" %(dstdir)
+ print 'cmd = %s' %(cmd)
+ subprocess.call(cmd, shell=True)
+
+ def construct_srv_obs_build_project_repo_package(self, rsyncd, snapshotdir):
+ """
+ """
+ def clear_primary_files(self):
+ for repo in self.profile_repos['repo']:
+ dstdir = os.path.join(self.profile_basedir, repo)
+ primarymd = os.path.join(dstdir, \
+ [ x for x in os.listdir(dstdir) \
+ if x.endswith('-primary.xml.gz') ][0])
+ os.remove(primarymd)
+
+ print "\n\n3) RSYC DOWNLOAD PRIMARY AND DEPENDS XML for %s at %s " % (self.profile['name'], datetime.now())
+ for repo in self.profile_repos['repo']:
+ repodata_path = os.path.join(rsyncd,snapshotdir,"repos",repo,"packages","repodata")
+ cmd = "rsync %s/ --list-only --include='%s' --exclude='*'" \
+ " | awk '{ print $5; }' | grep '.xml.gz' " \
+ % (repodata_path, "*-primary.xml.gz")
+ primarylist = execute_shell(cmd)
+            if primarylist and isinstance(primarylist, list):
+ dstdir = os.path.join(self.profile_basedir, repo)
+ self.copy_rsync_rpm(repodata_path, primarylist, dstdir)
+ else:
+ print 'No primary repodata found'
+ clear_primary_files(self)
+ return 3
+
+ depends_path = os.path.join(rsyncd,snapshotdir,"builddata","depends")
+ cmd = "rsync %s/ --list-only --include='%s' --exclude='*'" \
+ " | awk '{ print $5; }' | grep '_revpkgdepends.xml' " \
+ % (depends_path, "*_revpkgdepends.xml")
+ deplist = execute_shell(cmd)
+        if deplist and isinstance(deplist, list):
+ dstdir = os.path.join(self.profile_basedir)
+ self.copy_rsync_rpm(depends_path, deplist, dstdir)
+ else:
+ clear_primary_files(self)
+ print 'No depends list found'
+ return 4
+
+ print "\n\n4) GENERATE PACKAGE RPM LIST for %s at %s " % (self.profile['name'], datetime.now())
+ for repo in self.profile_repos['repo']:
+ src_bin_map = {}
+ dstdir = os.path.join(self.profile_basedir, repo)
+ primarymd = os.path.join(dstdir, \
+ [ x for x in os.listdir(dstdir) \
+ if x.endswith('-primary.xml.gz') ][0])
+ # Read primary repo metadata
+            primary = xml_to_obj(gzip.open(primarymd))  # primarymd is already a full path
+ print 'Removing primarymd %s' % primarymd
+ os.remove(primarymd)
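+            # Map each source (spec) name to the binary rpms built from it,
+            # e.g. 'bash' -> ['x86_64/bash-1.2-3.x86_64.rpm', ...].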
+ for package in primary.package:
+ spec_name = re.search(r'(.*)-(.*)-(.*).src.rpm', package.format.sourcerpm).groups()[0]
+ if spec_name in src_bin_map:
+ src_bin_map[spec_name].append(package.location.href)
+ else:
+ src_bin_map[spec_name] = [package.location.href]
+ # Read builddep info
+ for _file in [ x for x in os.listdir(self.profile_basedir) \
+ if repo in x and x.endswith('_revpkgdepends.xml') ]:
+ with open(os.path.join(self.profile_basedir, _file)) as df:
+ depends = xml_to_obj(''.join(df.readlines()))
+ if not depends or not depends.package:
+ continue
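+                # When the OBS package name differs from the spec (source)
+                # name, re-key the rpm list under the OBS package name.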
+ for package in depends.package:
+ if package.source in src_bin_map and package.source != package.name:
+ src_bin_map[package.name] = src_bin_map.pop(package.source)
+ print '\nRemoving garbage buildepinfo files'
+ for f in os.listdir(self.profile_basedir):
+ if re.search('.*_%s_.*_revpkgdepends.xml' % repo, f):
+ os.remove(os.path.join(self.profile_basedir, f))
+
+ print "\n\n5) HARD LINK PACKAGE RPMS for %s at %s" % (repo, datetime.now())
+ target_arch = [ x for x in os.listdir(dstdir) if os.path.isdir(os.path.join(dstdir, x)) ]
+ #TODO: arch hack
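+            # Collapse to the single primary arch by dropping the secondary
+            # build archs (i586, then x86_64) when more than one is present.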
+ if len(target_arch) != 1:
+ if 'i586' in target_arch:
+ target_arch.remove('i586')
+ if len(target_arch) != 1:
+ if 'x86_64' in target_arch:
+ target_arch.remove('x86_64')
+ print ' * %s... Start make link, target_arch : %s' % (repo, target_arch)
+ count = 0
+ for pkg_name in src_bin_map:
+ for y in src_bin_map[pkg_name]:
+ arch, rpm = y.split('/')
+ #TODO: i686 -> i586, noarch -> target_arch
+ arch = arch.replace('i686', 'i586').replace('noarch', target_arch[0])
+ src_file = os.path.join(dstdir,arch,':full', rpm)
+ pkg_dir = os.path.join(dstdir,arch,pkg_name)
+ dst_file = os.path.join(pkg_dir, rpm)
+ if not os.path.exists(src_file):
+ print ' not exist... %s, %s' % (src_file, y)
+ raise LocalError('missing rpms')
+ # Link rpms... /repo/arch/:full/*.rpm -> /repo/arch/pkg/*.rpm
+ if not os.path.exists(pkg_dir):
+ os.makedirs(pkg_dir)
+ if os.path.exists(dst_file):
+ os.remove(dst_file)
+ os.link(src_file, dst_file)
+ count += 1
+ print ' Total %d rpms linked at %s' % (count, datetime.now())
+ sys.stdout.flush()
+ print '\n\n'
+
+ def run_ref_import_rpm_obs(self, action=None):
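+        """ rsync snapshot rpms into each repo/arch :full tree and rescan OBS """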
+
+ print '---[JOB STARTED]-------------------------'
+
+ fields = trigger_info(os.getenv("TRIGGER_INFO"))
+
+        # Check if we've got required fields in TRIGGER_INFO
+ for field in ('profile', 'target', 'project', 'build_id', 'repo_path'):
+ if field not in fields:
+ print 'Error: TRIGGER_INFO doesn\'t contain %s' % field
+ return -1
+
+ self.profile = self.setup_profile(fields['profile'], fields['target'])
+ if not self.profile:
+ print 'Skip Sync OBS project %s' % fields['project']
+ return 0
+
+ buildid = fields['build_id'].split('_')[1]
+ snapshotdir = fields['repo_path']
+ rsyncd = os.getenv('IMG_SYNC_DEST_BASE')
+ print 'rsyncd = %s snapshotdir = %s ' %(rsyncd, snapshotdir)
+        if not rsyncd or not snapshotdir or not buildid:
+            print "Please check rsync and snapshot dir. Skip!"
+            return
+
+ print "1) Init a rpm files from download server "
+ for repo in self.profile_repos['repo']:
+
+ rpath = os.path.join(rsyncd,snapshotdir,"repos",repo)
+ repospath = os.path.join(rsyncd,snapshotdir,"repos")
+ noarchstring = "*.noarch.rpm"
+
+ print "1-1) repo = %s" %(repo)
+ for arch , path in self.profile_repos[repo]:
+ print "1-2) repo = %s , arch = %s" %(repo,arch)
+ rescan_after_delete = False
+ for del_x in os.listdir(os.path.join(self.profile_basedir, repo, arch)):
+ del_path = os.path.join(self.profile_basedir, repo, arch, del_x)
+ if os.path.isdir(del_path):
+ shutil.rmtree(del_path)
+ rescan_after_delete = True
+ if rescan_after_delete:
+ del_cmd = "obs_admin --rescan-repository %s %s %s " \
+ %(self.profile['refprj'],repo,arch)
+ subprocess.call(del_cmd, shell=True)
+
+ archstring = "*.%s.rpm" % (arch)
+
+ #print rpath
+ cmd = "rsync %s -r --list-only --include='%s' --include='*/' --exclude='*'" \
+ " | awk '{ print $5; }' | grep '.rpm' " \
+ % (rpath , archstring)
+ archfilelists = execute_shell(cmd)
+ if arch == 'i586':
+ print 'add i686 arch'
+ cmd = "rsync %s -r --list-only --include='%s' --include='*/' --exclude='*'" \
+ " | awk '{ print $5; }' | grep '.rpm' " \
+ % (rpath , "*.i686.rpm")
+ #print cmd
+ extrafilelists = execute_shell(cmd)
+ if extrafilelists:
+ if archfilelists:
+ archfilelists += extrafilelists
+ else:
+ archfilelists = extrafilelists
+
+ # make dir
+ dstdir = os.path.join(self.profile_basedir, repo, arch,":full")
+ #print dstdir
+ if not os.path.exists(dstdir):
+ os.mkdir(dstdir)
+
+ # Copy arch rpm binary
+ print "1-4) Copy arch rpm binary "
+ if archfilelists:
+ self.copy_rsync_rpm(repospath, archfilelists, dstdir)
+
+ # search noarch list
+ cmd = "rsync %s -r --list-only --include='%s' --include='*/' --exclude='*'" \
+ " | awk '{ print $5; }' | grep '.rpm' " \
+ % (rpath , noarchstring)
+ #print cmd
+ noarchfilelists = execute_shell(cmd)
+
+ # Copy noarch rpm binary
+ print "1-6) Copy noarch rpm binary "
+ if noarchfilelists:
+ self.copy_rsync_rpm(repospath, noarchfilelists, dstdir)
+
+        # Link rpms from :full into per-package directories
+ #self.construct_srv_obs_build_project_repo_package(rsyncd, snapshotdir)
+
+ # obs-admin rescan
+ print "2) obs_admin --rescan-repository "
+ for repo in self.profile_repos['repo']:
+ for arch , path in self.profile_repos[repo]:
+ cmd = "obs_admin --rescan-repository %s %s %s " \
+ %(self.profile['refprj'],repo,arch)
+ subprocess.call(cmd, shell=True)
+
+        # Link rpms from :full into per-package directories
+        if int(os.getenv("REF_IMPORT_RPM_WITH_SRC", 0)) == 1:  # env values are strings
+ self.construct_srv_obs_build_project_repo_package(rsyncd, snapshotdir)
+
+ def main(self, action):
+
+ print('-----[JOB STARTED: importrpm_for_obs ]-----')
+ self.run_ref_import_rpm_obs(action[1] if len(action) == 2 else None)
+
+if __name__ == '__main__':
+ try:
+ trigger = ref_import_rpm_obs()
+ sys.exit(trigger.main(sys.argv))
+ except Exception as e:
+ print(e)
+ sys.exit(1)
+