From: Li Yi
Date: Thu, 9 Aug 2012 10:48:18 +0000 (+0800)
Subject: [jenkins repomaker]: add repomaker build script for jenkins job
X-Git-Tag: 0.12~103
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=b1a4c6073a674cef1dea1c9e8396a80fb97c6d8d;p=services%2Fjenkins-scripts.git

[jenkins repomaker]: add repomaker build script for jenkins job

import repomaker from boss and do the same thing as a jenkins job
---

diff --git a/boss-repomaker.py b/boss-repomaker.py
new file mode 100755
index 0000000..76eaaa5
--- /dev/null
+++ b/boss-repomaker.py
@@ -0,0 +1,186 @@
+#!/usr/bin/env python
+
+from __future__ import with_statement
+from RuoteAMQP.participant import Participant
+import os, sys, io
+import ConfigParser
+import optparse
+from string import Template
+
+try:
+    import json
+except ImportError:
+    import simplejson as json
+
+# Fallback configuration. If you need to customize it, copy it somewhere
+# (ideally to your system's configuration directory), modify it and
+# pass it with the -c option
+
+from common.envparas import export
+import repomaker
+
+P_NAME = "repomaker"
+
+DEFAULTCONF = """
+[$pname]
+# ${pname} specific conf
+
+# where to put the snapshot repos
+builds = /srv/snapshots
+
+# configuration YAML file for supported repos
+repo_conf = /etc/repos/repos.yaml
+
+# OBS apiurl to query repo status
+obs_api = http://api.build.tizen.org
+oscrc = /etc/boss/oscrc
+
+# path of OBS backend services, should be updated if OBS didn't use the default
+raw_repos = /srv/obs/repos
+obs_triggers_path = /srv/obs/repos_sync
+obs_building_path = /srv/obs/build
+
+# use armv8 trick for armv7hl with hw FL
+no_armv8 = no
+
+# notification mail to specified address
+mailto = root@localhost
+
+# optional, needed if it is different from normal snapshots
+;sandbox_repo_baseurl = http://hostname-or-ip/sandbox
+"""
+
+CONFS = {}
+
+PARAM_LIST = ['BUILDS_PATH',
+              'REPO_CONF',
+              'OBS_API_URL',
+              'OSCRC_PATH',
+              'RAW_REPOS',
+              'OBS_TRIGGERS_PATH',
+              'OBS_BUILDING_PATH',
+              'NO_ARMV8',
+              'MAILTO',
+              'SANDBOX_REPO_BASEURL',
+              'EMAIL_TEMPLATES_DIR',
+              'OBS_EVENT_STRING',
+              'BUILD_TAG']
+
+export(PARAM_LIST, locals())
+
+def main():
+    event = json.loads(' '.join(OBS_EVENT_STRING.split()))
+    event_fields = event['fields']['obsEvent']
+
+    if not event_fields:
+        print "Invalid OBS event: %s" %(OBS_EVENT_STRING)
+        sys.exit(-1)
+
+    try:
+        from common.buildservice import BuildService
+        bs = BuildService(apiurl=CONFS['apiurl'], oscrc=CONFS['oscrc'])
+    except Exception, e:
+        print 'OBS access errors: ', str(e)
+        sys.exit(-1)
+
+    repomgr = repomaker.RepoManager(bs, CONFS)
+
+    print "%s job running" % P_NAME
+
+    try:
+        status = repomgr.create(event_fields['project'], event_fields['repo'])
+        print status
+        if status:
+            with open('%s.env' %(BUILD_TAG), 'w') as f:
+                for k in status.keys():
+                    f.write("%s = %s\n" %(k, status[k]))
+        else:
+            sys.exit(-1)
+    except Exception, e:
+        print 'Error: ', str(e)
+        sys.exit(-1)
+
+if __name__ == "__main__":
+    parser = optparse.OptionParser()
+    parser.add_option("-c", "--config", dest="config",
+                      help="specify configuration file")
+    parser.add_option("", "--get-defconf", dest="get_defconf", action="store_true",
+                      help="Print out the default configuration file")
+
+    (opts, args) = parser.parse_args()
+
+    temp = Template(DEFAULTCONF)
+    str_conf = temp.substitute(pname=P_NAME)
+
+    if opts.get_defconf:
+        print str_conf
+        sys.exit(0)
+
+    if opts.config:
+        with open(opts.config) as f:
+            str_conf = f.read()
+
+    config = ConfigParser.ConfigParser()
+    config.readfp(io.BytesIO(str_conf))
+
+    try:
+        # repomaker participant specific conf
+        CONFS['raw_repos'] = RAW_REPOS or config.get(P_NAME, 'raw_repos')
+        CONFS['repo_conf'] = REPO_CONF or config.get(P_NAME, 'repo_conf')
+        CONFS['apiurl'] = OBS_API_URL or config.get(P_NAME, 'obs_api')
+        CONFS['oscrc'] = OSCRC_PATH or config.get(P_NAME, 'oscrc')
+        CONFS['builds'] = BUILDS_PATH or config.get(P_NAME, 'builds')
+    except ConfigParser.NoOptionError, e:
+        print 'In config, %s' % str(e)
+        sys.exit(1)
+
+    builds_dir = CONFS['builds']
+    if not os.path.exists(builds_dir):
+        os.makedirs(builds_dir)
+        # the owner will be root, with 0777 permission
+        os.chmod(builds_dir, 0777)
+
+    try:
+        CONFS['obs_triggers_path'] = OBS_TRIGGERS_PATH or config.get(P_NAME, 'obs_triggers_path')
+    except ConfigParser.NoOptionError:
+        CONFS['obs_triggers_path'] = '/srv/obs/repos_sync'
+
+    try:
+        CONFS['obs_building_path'] = OBS_BUILDING_PATH or config.get(P_NAME, 'obs_building_path')
+    except ConfigParser.NoOptionError:
+        CONFS['obs_building_path'] = '/srv/obs/build'
+
+    try:
+        mailto = MAILTO or config.get(P_NAME, 'mailto')
+        CONFS['mailto'] = filter(None, [s.strip() for s in mailto.split(',')])
+    except ConfigParser.NoOptionError:
+        CONFS['mailto'] = []
+
+    try:
+        CONFS['no_armv8'] = (NO_ARMV8.lower() or config.get(P_NAME, 'no_armv8').lower()) == 'yes'
+    except ConfigParser.NoOptionError:
+        CONFS['no_armv8'] = False
+
+    try:
+        CONFS['sandbox_repo_baseurl'] = SANDBOX_REPO_BASEURL or config.get(P_NAME, 'sandbox_repo_baseurl')
+    except ConfigParser.NoOptionError:
+        CONFS['sandbox_repo_baseurl'] = None
+
+    CONFS['email_templates_dir'] = EMAIL_TEMPLATES_DIR
+
+    # check the toplevel dirs in repos.yaml
+    dirs = repomaker.get_toplevel_dirs_from_repoconf(CONFS['repo_conf'])
+    for d in dirs:
+        if not os.path.exists(d):
+            os.makedirs(d)
+            # the owner will be root, with 0777 permission
+            os.chmod(d, 0777)
+
+    # UGLY code, needs to be removed
+    tmpdir = '/srv/tmp'
+    if not os.path.exists(tmpdir):
+        os.makedirs(tmpdir)
+        # the owner will be root, with 0777 permission
+        os.chmod(tmpdir, 0777)
+
+    main()
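For reference, a minimal sketch of the data this job consumes and produces, assuming a stripped-down obsEvent payload (the real event carries more fields; the project, repo and BUILD_TAG values below are invented). The script only reads event['fields']['obsEvent']['project'] and ['repo'], and hands its results to later build steps as a BUILD_TAG.env properties file that an EnvInject-style step can load:

#!/usr/bin/env python
# Sketch only: payload and BUILD_TAG are invented examples.
import json

OBS_EVENT_STRING = '{"fields": {"obsEvent": {"project": "Tizen:Main", "repo": "standard"}}}'
BUILD_TAG = 'jenkins-repomaker-1'

event = json.loads(' '.join(OBS_EVENT_STRING.split()))
event_fields = event['fields']['obsEvent']
print event_fields['project'], event_fields['repo']

# repomgr.create() returns a dict such as {'build_id': ..., 'snapshot': True};
# boss-repomaker.py serializes it as "key = value" lines for the next build step.
status = {'build_id': 'tizen_20120809.1', 'snapshot': True}
with open('%s.env' % BUILD_TAG, 'w') as envf:
    for k in status.keys():
        envf.write("%s = %s\n" % (k, status[k]))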
diff --git a/common/buildservice.py b/common/buildservice.py
index c77e407..a2340ea 100644
--- a/common/buildservice.py
+++ b/common/buildservice.py
@@ -957,6 +957,14 @@ class BuildService(object):
             r.append((rev, srcmd5, version, t, user, comment))
         return r
 
+    def getProjectConfig(self, project):
+        """
+        getProjectConfig(project) -> string
+
+        Get build config of project
+        """
+        return ''.join(core.show_project_conf(self.apiurl, project))
+
     def getProjectMeta(self, project):
         """
         getProjectMeta(project) -> string
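The new getProjectConfig() helper simply joins the list of lines that osc's core.show_project_conf() returns for the project's prjconf. A rough usage sketch, mirroring how repomaker later derives a build.conf file name from it (the apiurl and oscrc values are the defaults from the config above, and the project name is a placeholder):

# Sketch only: placeholder project name, no error handling.
import hashlib
from common.buildservice import BuildService

bs = BuildService(apiurl='http://api.build.tizen.org', oscrc='/etc/boss/oscrc')
prjconf = bs.getProjectConfig('Tizen:Main')   # whole prjconf as one string
prjconf_fn = "%s-build.conf" % hashlib.sha256(prjconf).hexdigest()
print prjconf_fn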
diff --git a/repomaker/__init__.py b/repomaker/__init__.py
new file mode 100644
index 0000000..5373c2f
--- /dev/null
+++ b/repomaker/__init__.py
@@ -0,0 +1,548 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+# for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 59
+# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+try:
+    import json
+except ImportError:
+    import simplejson as json
+
+from linkrepo import LinkRepo
+import os, sys
+import re
+from time import sleep
+from datetime import datetime
+import tempfile
+import stat
+import shutil
+import xml.etree.ElementTree as ET
+import yaml
+
+
+ARCHES = {'ia32': 'i586', 'armv7l': 'armv7el', 'armv7hl': 'armv8el'}
+ARCHES_REV = {'i586': 'ia32', 'armv7el': 'armv7l', 'armv8el': 'armv7hl'}
+
+# fixing of buggy xml.dom.minidom.toprettyxml
+XMLTEXT_RE = re.compile('>\n\s+([^<>\s].*?)\n\s+ %s %s is %s" %(prj, t, state[t])
+            count = 1
+            while ( state[t] == 'publishing' or \
+                    state[t] == 'finished' or \
+                    state[t] == 'scheduling' ) and count < 100:
+                print "repo %s status is %s, waiting..." % (t, state[t])
+                sys.stdout.flush()
+
+                sleep(5)
+                state = self.bs.getRepoState(prj)
+                count = count + 1
+
+            if state[t] != 'published':
+                print ">> %s %s is still in '%s', skip" %(prj, t, state[t])
+                ret = False
+                break
+
+        return ret
+
+    def update_image_configs(self, dest_repo, dest_pkg):
+        """ Checking for patterns at: rawrepo/repodata/{group.xml,patterns.xml}.gz
+        """
+        import re
+
+        pg = re.compile("image-configurations.*.rpm")
+        found = False
+        for root, dirs, files in os.walk("%s" %dest_pkg):
+            for f in files:
+                if re.match(pg, f):
+                    print "image configurations found: %s/%s" %(root,f)
+                    tmpdir = tempfile.mkdtemp()
+                    pwd = os.getcwd()
+                    os.chdir(tmpdir)
+                    os.system("rpm2cpio %s/%s | cpio -idmv" %(root,f))
+                    os.system("mkdir -p %s/builddata" %dest_repo)
+                    os.system("rm %s/builddata/image-configs/*.ks" %(dest_repo))
+                    os.system("mkdir -p %s/builddata/image-configs" %dest_repo)
+                    if os.path.exists("%s/usr/share/image-configurations/image-configs.xml" %tmpdir):
+                        shutil.copyfile("%s/usr/share/image-configurations/image-configs.xml" %tmpdir, '%s/builddata/image-configs.xml' %dest_repo)
+                    os.system("cp %s/usr/share/image-configurations/*.ks %s/builddata/image-configs" %(tmpdir, dest_repo))
+                    os.chdir(pwd)
+                    shutil.rmtree(tmpdir)
+                    found = True
+                    break
+
+            if found: break
+
+        if not found:
+            print "did not find any image configurations"
+
+    def create(self, prj, target):
+
+        if not prj or not target:
+            print '"project" and "target" must be specified in params'
+            return None
+
+        repo = self._get_repo( prj, target)
+        if not repo:
+            print "No repos meta in %s for %s/%s" % (self.conf['repo_conf'], prj, target)
+            return None
+
+        if repo['PartOf'] == 'sandbox':
+            if 'SandboxOf' not in repo or \
+               not self._get_repo_by_name(repo['SandboxOf']):
+                print 'Invalid sandbox repo settings for %s' % prj
+                return None
+
+        wi = {}
+
+        print "=========================="
+        print datetime.today()
+        print "Repo: %s" %repo['Name']
+        print "=========================="
+
+        if 'BaseDir' in repo:
+            builds_base = repo['BaseDir']
+        else:
+            builds_base = self.conf['builds']
+
+        rname = repo['Name']
+        # see if all related repos are published and prepare for a lockdown and a snapshot
+        ready = {}
+        link = {}
+        for arch in repo['archs']:
+            print "Checking status of %s" %arch
+            ready[arch] = True
+            link[arch] = True
+            ret = self._check_published(self._get_repo_by_name(rname), arch)
+            print "ret: %s" %ret
+            if not ret:
+                print "%s is not ready yet, can't create a snapshot" %rname
+                ready[arch] = False
+                link[arch] = False
+                continue
+
+            if 'DependsOn' in repo:
+                toprepo = self._get_repo_by_name(repo['DependsOn'])
+                if self._check_published(toprepo, arch):
+                    print '%s depends on %s which is published' %(rname, repo['DependsOn'] )
+                    if 'Dependents' in toprepo:
+                        for d in toprepo['Dependents']:
+                            deprepo = self._get_repo_by_name(d)
+                            if not self._check_published(deprepo, arch):
+                                ready[arch] = False
+                                print "%s is not ready yet, can't create a snapshot" %d
+                                break
+                else:
+                    print '%s depends on %s which is not published yet' %(rname, repo['DependsOn'] )
+                    ready[arch] = False
+
+            elif 'Dependents' in repo:
+                for d in repo['Dependents']:
+                    deprepo = self._get_repo_by_name(d)
+                    if not self._check_published(deprepo, arch):
+                        ready[arch] = False
+                        print "%s is not ready yet, can't create a snapshot" %d
+                        break
+
+                    if 'DependsOn' in deprepo and rname in deprepo['DependsOn']:
+                        toprepo = self._get_repo_by_name(deprepo['DependsOn'])
+                        if not self._check_published(toprepo, arch):
+                            ready[arch] = False
+                            print "%s is not ready yet, can't create a snapshot" %deprepo['DependsOn']
+                            break
+
+        status = True
+        for arch in repo['archs']:
+            if link[arch]:
+                print "Creating repo for %s %s arch: %s, " %(prj, target, arch),
+
+                if 'GpgKey' in repo:
+                    gpgkey = repo['GpgKey']
+                else:
+                    gpgkey = None
+
+                if 'SignUser' in repo:
+                    signer = repo['SignUser']
+                else:
+                    signer = None
+
+                lr = LinkRepo(self.conf['raw_repos'], gpgkey, signer)
+
+                liverepo = {'prj': prj,
+                            'target': target,
+                            'arch': arch,
+                           }
+
+                # support of 'Sandbox' repos and images for devel prjs
+                if repo['PartOf'] == 'sandbox':
+                    brepo = self._get_repo_by_name(repo['SandboxOf'])
+                    baserepo = {'prj': brepo['Project'],
+                                'target': brepo['Target'],
+                                'arch': arch,
+                               }
+                else:
+                    baserepo = None
+
+                # support for hidden binary rpms to be included in the snapshot
+                if 'ExtraRpms' in repo:
+                    extrarpms = repo['ExtraRpms']
+                else:
+                    extrarpms = None
+
+                # whether to put the OBS project build conf into the repodata dir
+                if 'ProjectConfig' in repo and repo['ProjectConfig']:
+                    prjconf = self.bs.getProjectConfig(prj)
+                else:
+                    prjconf = None
+
+                status = lr.linkrepo(self.conf['obs_triggers_path'], liverepo, repo['Location'], baserepo, extrarpms, prjconf)
+                if not os.path.exists("%s/builddata/image-configs.xml" %(repo['Location'])) and status:
+                    self.update_image_configs(repo['TopLevel'], "%s/ia32/packages" %repo['Location'])
+                print "result: %s" %( "Ok" if status else "Error")
+
+        if not status:
+            wi['snapshot'] = False
+            return None
+
+        Go = False
+        if ready['ia32']:
+            for rr in ready.keys():
+                if ready[rr]:
+                    Go = True
+
+        if Go:
+            tmprepo = tempfile.mkdtemp(prefix='repomaker-', dir='/srv/tmp')
+            os.makedirs("%s/repos" %tmprepo, 0755)
+            os.system("cp -al %s/builddata %s" %(repo['TopLevel'], tmprepo))
+
+            xmlroot = ET.Element("build")
+            xarch = ET.SubElement(xmlroot, "archs")
+            xrepo = ET.SubElement(xmlroot, "repos")
+
+            for arch in repo['archs']:
+                if ready[arch]:
+                    ET.SubElement(xarch, "arch").text = arch
+
+            for i in os.listdir("%s/repos" %repo['TopLevel']):
+                print "working on %s" %i
+                if not os.path.exists("%s/repos/%s" %(tmprepo,i)):
+                    os.makedirs("%s/repos/%s" %(tmprepo,i), 0755)
+
+                # source repo
+                os.system("cp -al %s/repos/%s/source %s/repos/%s" %(repo['TopLevel'], i, tmprepo, i))
+                ET.SubElement(xrepo, "repo").text = i
+
+                # arch specific repos
+                for arch in repo['archs']:
+                    if ready[arch]:
+                        os.system("cp -al %s/repos/%s/%s %s/repos/%s" %(repo['TopLevel'], i, arch, tmprepo, i))
+
+            # decide which project's build.conf to put under 'builddata'
+            prjconf = self._get_buildbase_conf(rname)
+            if prjconf:
+                import hashlib
+                xconf = ET.SubElement(xmlroot, "buildconf")
+                prjconf_fn = "%s-build.conf" % hashlib.sha256(prjconf).hexdigest()
+                xconf.text = prjconf_fn
+
+            # creating a snapshot is basically a copy of the daily build to
+            # a new location with a build ID.
+            # once the snapshot is created, we can start the image creation process
+            print "We are ready to create a snapshot for %s (and all other related repos)" %rname
+
+            if 'TopLevel' in repo and 'PartOf' in repo:
+                from release import BuildID
+                bid = BuildID()
+                build_id = bid.get_new_build_id(release=repo['Release'],
+                                                type=repo['PartOf'], # sandbox or not
+                                                sandbox_prj=repo['Project'])
+                ET.SubElement(xmlroot, "id").text = build_id
+
+                if 'SnapshotDir' in repo:
+                    top = repo['SnapshotDir']
+                else:
+                    top = builds_base
+
+                if not os.path.isdir(top):
+                    os.makedirs(top, 0755)
+
+                snapshotdir = "%s/%s" %(top, build_id)
+
+                print "linking %s to %s" %(tmprepo, snapshotdir)
+                os.system("cp -al %s %s" %(tmprepo, snapshotdir))
+                os.chmod(snapshotdir, 0755)
+
+                if 'Link' in repo:
+                    # create symbolic links
+                    if os.path.exists("%s/%s" %(top, repo['Link'])):
+                        os.remove("%s/%s" %(top, repo['Link']))
+                    print "Creating symlink %s -> %s/%s" %(snapshotdir, top, repo['Link'])
+                    os.symlink(build_id, "%s/%s" %(top, repo['Link']))
+
+                wi['build_id'] = build_id
+                wi['snapshot'] = True
+
+                template_str = self._read_template('new_build')
+                msg = []
+                try:
+                    from Cheetah.Template import Template
+                    template = Template(template_str, searchList = wi )
+                    template.msg = "\n".join(msg)
+                    body = str(template)
+                except ImportError:
+                    from string import Template
+                    template = Template(template_str)
+                    body = template.safe_substitute(wi, msg="\n".join(msg))
+
+                wi['body'] = body.replace('\n', '\\n')
+                wi['snapshot_subject'] = 'Snapshot %s created' %(build_id)
+
+                tree = ET.ElementTree(xmlroot)
+                from xml.dom import minidom
+                rough_string = ET.tostring(xmlroot, 'utf-8')
+                reparsed = minidom.parseString(rough_string)
+
+                # make sure the 'builddata' dir exists even if there is no 'image-configurations' rpm
+                try:
+                    os.makedirs("%s/builddata" % snapshotdir)
+                except OSError:
+                    pass
+
+                xf = open("%s/builddata/build.xml" %(snapshotdir), 'w')
+                xf.write(XMLTEXT_RE.sub('>\g<1> "
+    parser = optparse.OptionParser(usage)
+    opts, args = parser.parse_args()
+
+    try:
+        prj, target, dest_repo, arch = args[:4]
+    except ValueError:
+        parser.error('wrong parameters, please see usage')
+        sys.exit(1)
+
+    linker = LinkRepo('/srv/obs/repos')
+    repo = {'prj': prj, 'target': target, 'arch': arch}
+    linker.linkrepo('/srv/obs/repos_sync', repo, dest_repo)
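The element tree that create() assembles ends up in builddata/build.xml inside each snapshot. A standalone sketch of that layout, with invented arch, repo, buildconf and id values, pretty-printed the same way (minus the XMLTEXT_RE whitespace cleanup):

# Sketch of the builddata/build.xml structure; all values are examples.
import xml.etree.ElementTree as ET
from xml.dom import minidom

xmlroot = ET.Element("build")
xarch = ET.SubElement(xmlroot, "archs")
xrepo = ET.SubElement(xmlroot, "repos")
for arch in ['ia32', 'armv7l']:
    ET.SubElement(xarch, "arch").text = arch
for name in ['standard']:
    ET.SubElement(xrepo, "repo").text = name
ET.SubElement(xmlroot, "buildconf").text = "0123abcd-build.conf"  # only when a prjconf is attached
ET.SubElement(xmlroot, "id").text = "tizen_20120809.1"

print minidom.parseString(ET.tostring(xmlroot, 'utf-8')).toprettyxml(indent="  ")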
diff --git a/repomaker/release.py b/repomaker/release.py
new file mode 100644
index 0000000..fc3911f
--- /dev/null
+++ b/repomaker/release.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+
+from datetime import date, datetime, timedelta
+import os
+
+RELEASE_SPOOL = '/var/spool/repomaker'
+
+def _simple_buildid(today = None):
+    if today is None:
+        today = date.today()
+    return today.strftime('%Y%m%d')
+
+class BuildID:
+    def __init__(self, spool = RELEASE_SPOOL):
+        if not os.path.exists(spool):
+            os.makedirs(spool)
+
+        self.spool = spool
+
+    def _get_latest_build_id(self, type, release):
+        f = open("%s/%s-%s" %(self.spool, type, release) )
+        latest = f.readline()
+        return latest
+
+    def _save_new_build_id(self, id, type, release):
+        f = open("%s/%s-%s" %(self.spool, type, release), 'w')
+        f.write(id)
+        f.close()
+
+    def _get_release_number(self, release, today = None):
+        return _simple_buildid(today)
+
+    def _get_current_release(self):
+        return 'tizen'
+
+    def get_new_build_id(self, release = None, type='release', sandbox_prj=None, today=None):
+        if release is None:
+            release = self._get_current_release()
+
+        buildid = self._get_release_number(release, today)
+
+        if type == 'sandbox' and sandbox_prj is not None:
+            type = 'sandbox-' + sandbox_prj.replace(':', '-')
+
+        if os.path.exists("%s/%s-%s" %(self.spool, type, release) ):
+            latest = self._get_latest_build_id(type, release)
+            if buildid in latest:
+                # same day, new build number
+                l = latest.split(".")
+                build = int(l[-1]) + 1
+                buildid += '.%s' %build
+            else:
+                buildid += '.1'
+        else:
+            buildid += '.1'
+
+        self._save_new_build_id(buildid, type, release)
+
+        if type.startswith('sandbox'):
+            return '_'.join([release + '-' + type, buildid])
+        else:
+            return '_'.join([release, buildid])
+
+if __name__ == "__main__":
+    # verify spool
+    print 'Two serial build ids for one day:'
+    bid = BuildID()
+    id = bid.get_new_build_id()
+    print id
+    id = bid.get_new_build_id()
+    print id
+
+    id = bid.get_new_build_id('tizen-99')
+    print id
+    id = bid.get_new_build_id('tizen-99')
+    print id
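A short usage sketch of BuildID showing the ID formats release.py produces; the spool is redirected to a temporary directory so the example does not touch /var/spool/repomaker, the sandbox project name is made up, and the dates in the comments are only examples since the IDs depend on the current day:

# Usage sketch; printed values depend on the current date.
import tempfile
from repomaker.release import BuildID

bid = BuildID(spool=tempfile.mkdtemp())
print bid.get_new_build_id('tizen')      # e.g. tizen_20120809.1
print bid.get_new_build_id('tizen')      # e.g. tizen_20120809.2 (second build of the same day)
print bid.get_new_build_id('tizen', type='sandbox',
                           sandbox_prj='home:foo')  # e.g. tizen-sandbox-home-foo_20120809.1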