+"""Common functions used in temp packages' builds"""
+
import os
import pwd
import re
import shutil
import subprocess
import tempfile
import time
from urllib import quote_plus

from osc import core

from common import runner
from common.buildservice import BuildService
]
def is_same_arch(arch_a, arch_b):
    """Return True when both arch names belong to one compatible family.

    Two arches count as "the same" when a single entry of the
    module-level ARCH_LIST table contains both of them.
    """
    return any(arch_a in family and arch_b in family
               for family in ARCH_LIST)
def safe_strip(astr):
    """str.strip wrapper that tolerates non-string input.

    Returns astr.strip() when the value supports it, otherwise the
    value unchanged (e.g. None or an int passes through untouched).
    """
    try:
        stripper = astr.strip
    except AttributeError:
        return astr
    return stripper()
class TempBuildPkgError(Exception):
    """Module-local exception used by the temp-build helpers."""
class BuildService2(BuildService):
- """ """
+ """The BuildService2 Class inheritted from BuildService"""
def __init__(self, apiurl, apiuser, apipasswd):
BuildService.__init__(self, apiurl, apiuser, apipasswd)
self.apiurl = apiurl
raise ET.ParseError, "Internal Error"
for result in tree.findall('result'):
- targets[('/'.join((result.get('repository'), result.get('arch'))))] = result.get('state')
+ targets[('/'.join((result.get('repository'),
+ result.get('arch'))))] = result.get('state')
return targets
-
def delete_project(self, project):
""" overwirte core deleteproject method, due to it always not work """
- u = core.makeurl(self.apiurl, ['source', project])
- os.system('curl -X DELETE %s -k --user %s:%s '%(u,
+ url = core.makeurl(self.apiurl, ['source', project])
+ os.system('curl -X DELETE %s -k --user %s:%s '%(url,
self.apiuser,
self.apipasswd))
if meta:
return meta
- except:
+ except TempBuildPkgError:
time.sleep(2)
- print "WARN: retrying %s" %retry
+ print "WARN: retrying %s" % retry
retry = retry - 1
return "</>"
def get_project_desp(self, project):
+ """Get project description though project"""
+
meta_xml = self.get_meta(project)
desp = ET.fromstringlist(meta_xml).find('description')
if desp is not None:
return None
def get_project_arch(self, project, repo_name):
+ """Get project arch though project and repo name"""
+
meta_xml = self.get_meta(project)
xml_root = ET.fromstringlist(meta_xml)
for repo_setting in xml_root.findall('repository'):
depend_project = []
def search_linked_project(project_name):
+ """Search projects which are linked"""
meta_xml = self.get_meta(project_name)
xml_root = ET.fromstringlist(meta_xml)
for repo_setting in xml_root.findall('repository'):
class TempPackageBuild:
+ """The TempPackageBuild Class"""
def __init__(self, apiurl, apiuser, apipasswd, project, package = None):
- self.bs = BuildService(apiurl, apiuser, apipasswd)
+ self._bs = BuildService2(apiurl, apiuser, apipasswd)
self.project = project
if not package:
- pkgs = self.get_package_list(self.project)
+ pkgs = self._bs.get_package_list(self.project)
if len(pkgs) == 1:
self.package = pkgs[0]
else:
self.package = package
- def remote_build(self, dest_project, git_project, spec, packagingdir, message = ''):
+ def remote_build(self, dest_project, git_project, spec, packagingdir,
+ message = ''):
""" Use gbs remotebuild to send local git tree to OBS as obs package """
- if not self.bs.get_targets(dest_project):
- print '\n----------\nOBS %s project do not have repo setting, quit build check against to it\n----------' % dest_project
+ if not self._bs.get_targets(dest_project):
+ print '\n----------\nOBS %s project do not have repo setting, quit'
+ 'build check against to it\n----------' % dest_project
return 'NoRepo'
- with open('%s/packaging/git_info' % git_project, 'w') as fh:
- fh.write(message)
- runner.show("gbs -v rb --include-all -B %s -T %s --spec %s --packaging-dir %s %s" %(dest_project, self.project, spec, packagingdir, git_project))
+ with open('%s/packaging/git_info' % git_project, 'w') as _fh:
+ _fh.write(message)
+ runner.show("gbs -v rb --include-all -B %s -T %s --spec %s"
+ "--packaging-dir %s %s" % (dest_project, self.project, spec,
+ packagingdir, git_project))
os.remove("%s/packaging/git_info" % git_project)
def wait_build_finish(self):
loop = True
while loop:
time.sleep(30)
- status = self.bs.get_results(self.project, self.package)
+ status = self._bs.get_results(self.project, self.package)
if not status:
break
for build_repo in status.keys():
else:
loop = False
- # We might need sometime to wait the 'finished' status change to final status
+ # We might need sometime to wait the 'finished' status change to
+ # final status
time.sleep(10)
- status = self.bs.get_results(self.project, self.package)
+ status = self._bs.get_results(self.project, self.package)
return status
def get_build_result(self):
""" return build result of this package """
try:
- return self.bs.get_results(self.project, self.package)
- except Exception, ex:
- print Exception, ":", ex
+ return self._bs.get_results(self.project, self.package)
+ except TempBuildPkgError, tbpe:
+ print TempBuildPkgError, ":", tbpe
return None
def get_build_log(self, repo, arch):
""" try to return build log, any except is assumed no build on OBS """
try:
# get last 200 lines if build log exceed 200 lines
- return '\n'.join(self.bs.get_build_log(self.project, "%s/%s" %(repo, arch), self.package).splitlines()[-200:])
- except:
+ return '\n'.join(self._bs.get_build_log(self.project, "%s/%s" %
+ (repo, arch), self.package).splitlines()[-200:])
+ except TempBuildPkgError:
return "No build log"
def get_git_info(self):
""" parse git info of this package from commit log """
git_info = {}
- # sometimes source link does not work when repo published, so retry 10 times
- # to wait obs to fix it automatically
+ # sometimes source link does not work when repo published,
+ # so retry 10 times to wait obs to fix it automatically
retry_count = 10
while retry_count > 0:
try:
- comment = self.bs.get_src_file_content(self.project, self.package, 'git_info')
+ comment = self._bs.get_src_file_content(self.project,
+ self.package, 'git_info')
break
- except Exception, ex:
- print Exception, ":", ex, 'retrying...'
+ except TempBuildPkgError, tbpe:
+ print TempBuildPkgError, ":", tbpe, 'retrying...'
time.sleep(3)
retry_count -= 1
for line in comment.split('\n'):
try:
- key, value = line.split(':',1)
+ key, value = line.split(':', 1)
git_info[key.strip()] = value.strip()
- except:
+ except TempBuildPkgError:
print '-------------------------------------'
- print 'key:value %s' %(line)
- pass
+ print 'key:value %s' % line
return git_info
def del_itself(self):
- self.bs.delete_project(self.project)
+ """Delete obs project though Project name"""
+
+ self._bs.delete_project(self.project)
class ProjectBuildService():
""" BuildService Class for one certain project """
def __init__(self, project, apiurl, apiuser, apipasswd):
self.project = project
- self.bs = BuildService2(apiurl = apiurl,
+ self._bs = BuildService2(apiurl = apiurl,
apiuser = apiuser,
apipasswd = apipasswd)
def get_repostatus(self):
- return self.bs.get_repo_state(self.project)
+ """Get repo status"""
+
+ return self._bs.get_repo_state(self.project)
def cleanup(self):
- return self.bs.delete_project(self.project)
+ """Delete obs project though project name"""
+
+ return self._bs.delete_project(self.project)
def is_published(self):
+ """Return True if repo is published, otherwise False"""
+
try:
return set(self.get_repostatus().values()) == set(['published'])
- except:
+ except TempBuildPkgError:
return False
def get_live_repo_path(self):
+ """Get the path of live repo"""
+
# FIXME: hard code here
- return "/srv/obs/repos/%s" %(self.project.replace(":", ":/"))
+ return "/srv/obs/repos/%s" % self.project.replace(":", ":/")
+
+ @staticmethod
+ def _su_osbrun():
+ """Run obs with su"""
- def _su_osbrun(self):
- os.setuid(getpwnam('obsrun').pw_uid)
- os.setgid(getpwnam('obsrun').pw_gid)
+ name_entry = pwd.getpwnam('obsrun')
+ os.setuid(name_entry.pw_uid)
+ os.setgid(name_entry.pw_gid)
def disable_build(self):
""" disable the project"""
- origin_meta = self.bs.get_meta(self.project)
+ origin_meta = self._bs.get_meta(self.project)
root = ET.fromstringlist(origin_meta)
build = root.find('build')
suffix=".xml", text=True)
os.write(fileh, ET.tostring(root))
- self.bs.update_meta(self.project, filename)
+ self._bs.update_meta(self.project, filename)
os.close(fileh)
os.unlink(filename)
def linkbuild_localdep(self, desp=''):
""" Link the project build against repo with dependency (localdep)"""
- origin_meta = self.bs.get_meta(self.project)
+ origin_meta = self._bs.get_meta(self.project)
root = ET.fromstringlist(origin_meta)
for repo_setting in root.findall('repository'):
suffix=".xml", text=True)
os.write(fileh, ET.tostring(root))
- self.bs.update_meta(self.project, filename)
+ self._bs.update_meta(self.project, filename)
os.close(fileh)
os.unlink(filename)
class RepoConf:
+ """The RepoConf class"""
+
def __init__(self, repos_conf_file):
if os.path.isfile(repos_conf_file):
self.repos = yaml.load(file(repos_conf_file))['Repositories']
else:
- raise Exception, "Fatal: Invalid repo configuration file: %s" %(repos_conf_file)
+ raise Exception, \
+ "Fatal: Invalid repo configuration file: %s" % \
+ repos_conf_file
def get_repo(self, project_name):
+ """Get repo though project name"""
for repo in self.repos:
if 'Project' not in repo or 'Target' not in repo:
if repo['Project'] == project_name:
return repo
return None
+
def get_local_base(self, project_name):
+ """Get local base path though project name"""
+
repo = self.get_repo(project_name)
if repo:
return repo['SnapshotDir']
def get_path_to_project(self, project_name):
+ """Get project path though project name"""
+
repo = self.get_repo(project_name)
if repo:
- path_to_project = repo['Location'].replace(repo['TopLevel'], '')
+ path_to_project = repo['Location'].replace(repo['TopLevel'], '')
if path_to_project.startswith('/'):
return path_to_project[1:]
else:
return path_to_project
def get_name(self, project_name):
+ """Get project name of repo"""
repo = self.get_repo(project_name)
if repo:
return repo['Name']
def get_repo_by_name(self, repo_name):
+ """Get repo though name"""
for repo in self.repos:
if 'Project' not in repo or 'Target' not in repo:
return None
def save_to_file(self, path_to_file, ext = None):
+ """Save content to file"""
stream = file(path_to_file, 'w')
if ext:
yaml.dump({"Repositories" : self.repos}, stream)
class BuildData:
+ """BuildData Class"""
+
def __init__(self):
self.__dict__['archs'] = []
self.__dict__['repos'] = []
def __setitem__(self, key, value):
return setattr(self, key, value)
+ def __delitem__(self, key):
+ delattr(self, key)
+
+ def __len__(self):
+ return len(self.__dict__)
+
+ def __getitem__(self, key):
+ return self.__dict__[key]
+
def save_to_file(self, path_to_file):
+ """Save content to file"""
impl = minidom.getDOMImplementation()
archs = dom.createElement('archs')
root.appendChild(archs)
- for arch in self.archs:
+ for arch in self.__dict__['archs']:
ele = dom.createElement('arch')
ele.appendChild(dom.createTextNode(arch))
archs.appendChild(ele)
repos = dom.createElement('repos')
root.appendChild(repos)
- for arch in self.repos:
+ for arch in self.__dict__['repos']:
ele = dom.createElement('repo')
ele.appendChild(dom.createTextNode(arch))
repos.appendChild(ele)
for attr in other_keys:
if hasattr(self, attr):
ele = dom.createElement(attr.lower())
- ele.appendChild(dom.createTextNode(getattr(self,attr)))
+ ele.appendChild(dom.createTextNode(getattr(self, attr)))
root.appendChild(ele)
builddate_xml = open(path_to_file, 'w')
be schedualed to runs on OBS server."""
def __init__(self, project, repo_conf = None,
- server_conf = {}, repo_category = 'standard'):
+ server_conf = (), repo_category = 'standard'):
self.project = project
self.repo_conf = repo_conf
- self.server_conf = server_conf
+ self.server_conf = dict(server_conf)
self.repo_category = repo_category
def get_live_repo_path(self, project = None):
+ """Get path of live repo"""
+
# OBS CONF HARDCODE
if not project:
project = self.project
self.repo_category)
def get_live_repo_pub_url(self, project = None):
+ """Get public url of live repo"""
+
if not project:
project = self.project
self.repo_category)
def get_latest_repo_path(self, project = None):
+ """Get the path of latest repo"""
+
if not project:
project = self.project
return None
def get_latest_repo_pub_url(self, project = None):
+ """Get latest pub url of repo"""
+
if project.startswith('home'):
return None
# TODO: ugly way to get latest repo pub url
url_base = self.server_conf.get('REPO_PUB_BASE_URL')
- local_path_to_project_list = self.get_latest_repo_path(project).split('/')
+ local_path_to_project_list = self.get_latest_repo_path(project).split(
+ '/')
join_path = url_base.split('/')[-1]
- url_to_project = '/'.join(
- local_path_to_project_list[local_path_to_project_list.index(join_path)+1:])
+ url_to_project = '/'.join(local_path_to_project_list[
+ local_path_to_project_list.index(join_path)+1:])
- return os.path.join(url_base,
- url_to_project)
+ return os.path.join(url_base, url_to_project)
def get_builddata_path(self, repo_path = None):
+ """Get build data path"""
# HARDCODE: builddata/
return os.path.join(repo_path if repo_path \
else self.get_latest_repo_path(),
'builddata')
def get_builddata(self, path = None):
+ """Get build data path"""
# HARDCODE: build.xml
if not path:
return os.path.join(self.get_builddata_path(), 'build.xml')
return os.path.join(path, 'build.xml')
def save_builddata(self, builddata, repo_path = None):
+ """Save build data to local file"""
+
builddata_path = self.get_builddata_path(repo_path)
if not os.path.isdir(builddata_path):
os.makedirs(builddata_path)
builddata.save_to_file(os.path.join(builddata_path,
'build.xml'))
- def save_buildlogs():
+ def save_buildlogs(self, live_repo_path):
+ """Save build log to local"""
# TODO: copy build logs
os.makedirs(os.path.join(live_repo_path, "builddata/buildlogs"))
- pass
def find_image_conf_pkg(self, path = None):
""" return image conf package abs path """
for root, dirs, files in os.walk(repopath):
if root.find('noarch') < 0:
continue
- for f in files:
- if re.match(pkg_pattern, f):
- print "image configurations found: %s/%s" %(root,f)
- return "%s/%s" %(root, f)
+ for conf in files:
+ if re.match(pkg_pattern, conf):
+ print "image configurations found: %s/%s" % (root, conf)
+ return "%s/%s" % (root, conf)
return None
def extract_image_ks(self, repo_path = None, path_to_image_conf_pkg = None):
os.system("rpm2cpio %s | cpio -idmv" %(image_conf_pkg))
os.system("cp %s/usr/share/image-configurations/*.ks %s/"\
%(tmpdir, image_conf_path))
-
- if os.path.exists("%s/usr/share/image-configurations/image-configs.xml" %tmpdir):
- shutil.copyfile("%s/usr/share/image-configurations/image-configs.xml" %tmpdir,
- '%s/image-configs.xml' %self.get_builddata_path(repo_path))
+ config_file = '%s/usr/share/image-configurations/image-configs.xml'
+ if os.path.exists(config_file % tmpdir):
+ shutil.copyfile(config_file %tmpdir, '%s/image-configs.xml' %
+ self.get_builddata_path(repo_path))
shutil.rmtree(tmpdir)
def get_image_conf(self, path = None):
+ """Get image conf path"""
+
if path:
return os.path.join(path,
'image-configs.xml')
'image-configs.xml')
def get_valid_image_ks(self, path = None):
+ """Get valid image ks information"""
+
ks_info = {}
valid_arch = []
print 'valid_arch list', valid_arch
if conf_str:
root = ET.XML(conf_str)
- for ks in root.findall('config'):
- for child in ks:
+ for ks_file in root.findall('config'):
+ for child in ks_file:
if child.tag == 'name':
name = safe_strip(child.text)
attr = {}
else:
attr[child.tag] = safe_strip(child.text)
# make sure this arch packages are released
- ks_arch = ks.find('arch').text
+ ks_arch = ks_file.find('arch').text
for arch in valid_arch:
if is_same_arch(ks_arch, arch):
ks_info[name] = attr