import ConfigParser
import optparse
from string import Template
-
+import re
+import yaml
try:
import json
except ImportError:
import simplejson as json
from common.envparas import export
+from common.buildservice import BuildService
import repomaker
import base64
P_NAME = "repomaker"
+class ServerConf:
+    """Attribute container for job configuration.
+
+    Replaces the old CONFS dict; __getitem__ keeps dict-style
+    access (conf['apiurl']) working for existing callers.
+    """
+
+    def __init__(self):
+        pass
+    def __getitem__(self, key):
+        # Dict-style lookup delegates to attribute access.
+        # NOTE(review): a missing key raises AttributeError, not
+        # KeyError -- confirm no caller catches KeyError here.
+        return getattr(self, key)
+
CONFS = {}
-PARAM_LIST = ['BUILDS_PATH',
- 'REPO_CONF',
+PARAM_LIST = ['PATH_BUILDS',
+ 'PATH_REPO_CONF',
'OBS_API_URL',
- 'OSCRC_PATH',
+ 'PATH_OSCRC',
'WORKSPACE',
- 'RAW_REPOS',
+ 'PATH_RAW_REPOS',
'OBS_TRIGGERS_PATH',
'OBS_BUILDING_PATH',
'NO_ARMV8',
print 'Empty kickstart file list, skip!'
-def create_repo(event_fields):
+def create_repo(serverconf, event_fields):
+    # Create the repository for the given OBS event.
+    #
+    # serverconf: ServerConf carrying apiurl/oscrc plus repo settings,
+    #             passed through to RepoManager.
+    # event_fields: OBS event dict; 'project' and 'repo' are used.
+    # Returns the status from RepoManager.create(); exits the process
+    # with -1 when the OBS connection cannot be established.
    try:
-        from common.buildservice import BuildService
-        bs = BuildService(apiurl=CONFS['apiurl'], oscrc=CONFS['oscrc'])
+
+        bs = BuildService(apiurl=serverconf.apiurl, oscrc=serverconf.oscrc)
    except Exception, e:
        print 'OBS access errors: ', str(e)
        sys.exit(-1)
-    repomgr = repomaker.RepoManager(bs, CONFS)
+    repomgr = repomaker.RepoManager(bs, serverconf)
    print "%s job running" % P_NAME
    status = repomgr.create(event_fields['project'], event_fields['repo'])
    return status
+def temp_repo_define(event_files, repos_conf):
+ REPOCONF_TEMPLATE="""
+ Name: %(name)s
+ PartOf: "review"
+ TopLevel: /srv/builds/temp/%(partof)s/%(package)s
+ Location: /srv/builds/temp/%(partof)s/%(package)s/repos/tizen-mobile
+ Project: %(project)s
+ RepoRequire: %(reporequire)s
+ Target: %(target)s
+ SandboxOf: %(sandboxof)s
+ SnapshotDir: /srv/snapshots/pre-release/%(package)s
+ Release: "%(release)s"
+ Architectures: %(arch)s
+"""
+ project = event_files["project"]
+ repo = event_files["repo"]
+ match = re.match("(.*):prerelease:(\d*):(\w*)", project)
+ if not match:
+ raise Exception, "Ivalid project name found"
+
+ target_prj, sr_id, package = match.groups()
+
+ REPO_CONF_STR = REPOCONF_TEMPLATE %{
+ "name": project.replace(":","-"),
+ "partof": target_prj,
+ "package": package,
+ "sandboxof": target_prj,
+ "project": project,
+ "target": repo,
+ "release": "sr.%s" %sr_id,
+ "arch":["ia32"],
+ "reporequire": ["Tizen:2.0:Base"] #FIXME
+ }
+ print REPO_CONF_STR
+
+ return yaml.load(REPO_CONF_STR)
+
if __name__ == "__main__":
- CONFS['raw_repos'] = RAW_REPOS
- CONFS['repo_conf'] = REPO_CONF
- CONFS['apiurl'] = OBS_API_URL
- CONFS['oscrc'] = OSCRC_PATH
- CONFS['builds'] = BUILDS_PATH
- repo_conf_fn = None
- # JOB is triggered by other job
- if TEMP_REPO == 'TRUE':
- repo_conf_fh, repo_conf_fn = tempfile.mkstemp(text=True)
- os.write(repo_conf_fh,base64.b64decode(TEMP_REPO_CONF_STRING))
- CONFS['repo_conf'] = repo_conf_fn
- event = json.loads(base64.b64decode(OBS_EVENT_STRING))
- event_fields = event['fields']['obsEvent']
- else:
- # JOB is trigger by OBS event
- event = json.loads(' '.join(OBS_EVENT_STRING.split()))
- event_fields = event['fields']['obsEvent']
+ server_conf = ServerConf()
+ server_conf.raw_repos = PATH_RAW_REPOS
+ server_conf.repos_conf = PATH_REPO_CONF
+ server_conf.apiurl = OBS_API_URL
+ server_conf.oscrc = PATH_OSCRC
+ server_conf.builds = PATH_BUILDS
+ server_conf.obs_triggers_path = OBS_TRIGGERS_PATH or '/srv/obs/repos_sync'
+ server_conf.obs_building_path = OBS_BUILDING_PATH or '/srv/obs/build'
+ server_conf.mailto = filter(None, [s.strip() for s in MAILTO.split(',')]) or []
+ server_conf.no_armv8 = (NO_ARMV8.lower()) == 'yes' or False
+ server_conf.sandbox_repo_baseurl = SANDBOX_REPO_BASEURL or None
+ server_conf.email_templates_dir = EMAIL_TEMPLATES_DIR
+
+
+
+ repo_conf_fn = None
+    # JOB is triggered by OBS event
+ event = json.loads(' '.join(OBS_EVENT_STRING.split()))
+ event_fields = event['fields']['obsEvent']
+
if not event_fields:
print "Invalid OBS event: %s" %(OBS_EVENT_STRING)
sys.exit(-1)
- builds_dir = CONFS['builds']
+ # Pre-release image creation temp project
+ if event_fields["project"].find(":prerelease:") > 0:
+ repos_conf = repomaker.RepoConf(server_conf.repos_conf)
+ temp_conf_fh, temp_conf_fn = tempfile.mkstemp(text=True)
+ repos_conf.save_to_file(temp_conf_fn, ext = temp_repo_define(event_fields, repos_conf))
+ server_conf.repos_conf = temp_conf_fn
+
+ builds_dir = server_conf.builds
if not os.path.exists(builds_dir):
os.makedirs(builds_dir)
# the owner will be root, with 0777 permission
os.chmod(builds_dir, 0777)
- CONFS['obs_triggers_path'] = OBS_TRIGGERS_PATH or '/srv/obs/repos_sync'
- CONFS['obs_building_path'] = OBS_BUILDING_PATH or '/srv/obs/build'
- CONFS['mailto'] = filter(None, [s.strip() for s in MAILTO.split(',')]) or []
-
- CONFS['no_armv8'] = (NO_ARMV8.lower()) == 'yes' or False
-
- CONFS['sandbox_repo_baseurl'] = SANDBOX_REPO_BASEURL or None
-
- CONFS['email_templates_dir'] = EMAIL_TEMPLATES_DIR
# check the toplevel dirs in repos.yaml
- dirs = repomaker.get_toplevel_dirs_from_repoconf(CONFS['repo_conf'])
+ dirs = repomaker.get_toplevel_dirs_from_repoconf(server_conf.repos_conf)
for d in dirs:
if not os.path.exists(d):
os.makedirs(d)
os.makedirs(tmpdir)
# the owner will be root, with 0777 permission
os.chmod(tmpdir, 0777)
+ project = event_fields["project"]
+ match = re.match("(.*):prerelease:(\d*):(\w*)", project)
-
- repoinfo = create_repo(event_fields)
-
+ if not match:
+ repoinfo = create_repo(server_conf, event_fields)
+ else:
+ target_prj, sr_id, package = match.groups()
+
+ bs = BuildService(apiurl=server_conf.apiurl, oscrc=server_conf.oscrc)
+ a=repomaker.ReviewRepo(project, server_conf, bs)
+
+ a.create()
+ repoinfo = a.get_info()
+
import pprint
pp = pprint.PrettyPrinter(indent=4)
print pp.pprint(repoinfo)
import xml.etree.ElementTree as ET
import yaml
+from xml.dom import minidom
ARCHES = {'ia32': 'i586', 'armv7l': 'armv7el', 'armv7hl': 'armv8el'}
ARCHES_REV = {'i586': 'ia32', 'armv7el': 'armv7l', 'armv8el': 'armv7hl'}
return paths
+class BuildData:
+    """Collects build metadata (archs, repos, id, buildconf) and
+    serializes it as build.xml under a snapshot's builddata dir."""
+
+    def __init__(self):
+        self.data = {}
+        # NOTE(review): direct __dict__ assignment behaves the same
+        # as plain attribute assignment here
+        self.__dict__['archs'] = []
+        self.__dict__['repos'] = []
+
+    def save_to_file(self, path_to_file):
+        # Write the collected build data as pretty-printed XML.
+
+        impl = minidom.getDOMImplementation()
+
+        dom = impl.createDocument(None, "build", None)
+        root = dom.documentElement
+
+        archs = dom.createElement('archs')
+        root.appendChild(archs)
+
+        for arch in self.archs:
+            ele = dom.createElement('arch')
+            ele.appendChild(dom.createTextNode(arch))
+            archs.appendChild(ele)
+
+        repos = dom.createElement('repos')
+        root.appendChild(repos)
+
+        # NOTE(review): children of <repos> are also tagged 'arch' --
+        # looks copy/pasted from the archs loop above; confirm
+        # whether consumers of build.xml expect 'repo' here instead.
+        for arch in self.repos:
+            ele = dom.createElement('arch')
+            ele.appendChild(dom.createTextNode(arch))
+            repos.appendChild(ele)
+
+        # Optional scalar fields are emitted only when set.
+        for attr in ['id', 'buildconf']:
+            if hasattr(self, attr):
+                ele = dom.createElement(attr)
+                ele.appendChild(dom.createTextNode(getattr(self,attr)))
+                root.appendChild(ele)
+
+        # XMLTEXT_RE is defined elsewhere in this module; presumably
+        # it collapses toprettyxml's text-node whitespace -- confirm.
+        builddate_xml = open(path_to_file, 'w')
+        builddate_xml.write(XMLTEXT_RE.sub('>\g<1></', dom.toprettyxml(indent=" ")))
+        builddate_xml.close()
+
+
+class RepoConf:
+ def __init__(self, repos_conf_file):
+
+ if os.path.isfile(repos_conf_file):
+ self.repos = yaml.load(file(repos_conf_file))['Repositories']
+ else:
+ raise Exception, "Fatal: Invalid repo configuration file: %s" %(repos_conf_file)
+
+ def get_repo(self, project_name):
+
+ for repo in self.repos:
+ if 'Project' not in repo or 'Target' not in repo:
+ continue
+
+ if repo['Project'] == project_name:
+ return repo
+ return None
+
+ def get_repo_by_name(self, repo_name):
+
+ for repo in self.repos:
+ if 'Project' not in repo or 'Target' not in repo:
+ continue
+
+ if repo['Name'] == repo_name:
+ return repo
+ return None
+
+ def save_to_file(self, path_to_file, ext = None):
+
+ stream = file(path_to_file, 'w')
+ if ext:
+ self.repos.append(ext)
+ yaml.dump({"Repositories" : self.repos}, stream)
+
+
class RepoManager:
def __init__(self, bs, myconf = None):
self.conf = myconf
self.bs = bs
- if 'no_armv8' in myconf and myconf['no_armv8']:
+ if hasattr(myconf, 'no_armv8'):
self.no_armv8 = True
else:
self.no_armv8 = False
return ''
def _readin_repos(self):
- repos_file = "%s" %self.conf['repo_conf']
+ repos_file = "%s" %self.conf['repos_conf']
if os.path.isfile(repos_file):
return yaml.load(file(repos_file))['Repositories']
else:
repo = self._get_repo( prj, target)
if not repo:
- print "No repos meta in %s for %s/%s" % (self.conf['repo_conf'], prj, target)
+ print "No repos meta in %s for %s/%s" % (self.conf['repos_conf'], prj, target)
return None
if repo['PartOf'] == 'sandbox':
if 'SandboxOf' not in repo or \
- not self._get_repo_by_name(repo['SandboxOf']):
+ not self._get_repo_by_name(repo['SandboxOf']):
print 'Invalid sandbox repo settings for %s' % prj
return None
else:
Go = True
if Go:
- tmprepo = tempfile.mkdtemp(prefix='repomaker-', dir='/srv/tmp')
+ tmprepo = tempfile.mkdtemp(prefix='repomaker-', dir='/srv/snapshots/tmp/')
os.makedirs("%s/repos" %tmprepo, 0755)
os.system("cp -al %s/builddata %s" %(repo['TopLevel'], tmprepo))
return None
else:
return self.bs.getProjectConfig(brepo['Project'])
+class ReviewRepo(RepoManager):
+ """
+ ReviewRepo, defined for create repo when there is SR/any changes created.
+ Repos Configuration flag:
+ PartOf: 'review'
+ The following fields in repoconf will be used:
+ SandboxOf: define the base project this change build against
+ RepoRequire: define the additional repo required within repo
+ """
+    def __init__(self, project, server_conf, bs):
+        """Set up a review repo handler for `project`.
+
+        project: OBS prerelease project name.
+        server_conf: ServerConf with repos_conf path and builds dir.
+        bs: BuildService instance for OBS access.
+        Asserts the repo definition is flagged PartOf: 'review'.
+        """
+        self.project = project
+        self.server_conf = server_conf
+        self.repos_define = RepoConf(server_conf.repos_conf)
+        self.bs = bs
+        self.reviewrepo = self.repos_define.get_repo(project)
+
+        assert(self.reviewrepo['PartOf'] == 'review')
+
+        RepoManager.__init__(self, bs, server_conf)
+
+        self.builddata = BuildData()
+
+        # keep the repo information
+        self.info = {}
+        self.info['ARCHS'] = self.reviewrepo['Architectures']
+        self.info['LOCATION'] = self.reviewrepo['Location']
+        self.info['TARGET'] = self.reviewrepo['Target']
+        self.info['snapshot'] = True # DEBUG
+        self.info['IMAGES_PATH'] = server_conf.builds # DEBUG
+
+
+    def get_info(self):
+        """Return the collected repo info dict for image creation.
+
+        Adds the release build id and, when the snapshot dir lives
+        under the builds root, a RELATIVE_URI for image-dispatcher.
+        """
+        self.info['release_build_id'] = self.builddata.id
+
+        # if 'SnapshotDir' is used for this repo, image-dispatcher
+        # needs a URI relative to the builds root
+        # FIX ME
+        if 'SnapshotDir' in self.reviewrepo:
+            if self.reviewrepo['SnapshotDir'].startswith(self.server_conf.builds):
+                self.info['RELATIVE_URI'] = self.reviewrepo['SnapshotDir'][len(self.server_conf.builds):]
+
+        return self.info
+
+
+ def is_ready(self):
+ ready = {}
+ link = {}
+ rname = self.reviewrepo['Name']
+ for arch in self.reviewrepo['Architectures']:
+ print "Checking status of %s" %arch
+ ready[arch] = True
+ link[arch] = True
+ ret = self._check_published(self._get_repo_by_name(rname), arch)
+ print "ret: %s" %ret
+ if not ret:
+ print "%s is not ready yet, can't create a snapshot" %rname
+ ready[arch] = False
+ link[arch] = False
+ continue
+
+ if 'DependsOn' in self.reviewrepo:
+ toprepo = self._get_repo_by_name(self.reviewrepo['DependsOn'])
+ if self._check_published(toprepo, arch):
+ print '%s depends on %s which is published' %(rname, self.reviewrepo['DependsOn'] )
+ if 'Dependents' in toprepo:
+ for d in toprepo['Dependents']:
+ deprepo = self._get_repo_by_name(d)
+ if not self._check_published(deprepo, arch):
+ ready[arch] = False
+ print "%s is not ready yet, can't create a snapshot" %d
+ break
+ else:
+ print '%s depends on %s which is not published yet' %(rname, self.reviewrepo['DependsOn'] )
+ ready[arch] = False
+
+ elif 'Dependents' in self.reviewrepo:
+ for d in repo['Dependents']:
+ deprepo = self._get_repo_by_name(d)
+ if not self._check_published(deprepo, arch):
+ ready[arch] = False
+ print "%s is not ready yet, can't create a snapshot" %d
+ break
+
+ if 'DependsOn' in deprepo and rname in deprepo['DependsOn']:
+ toprepo = self._get_repo_by_name(deprepo['DependsOn'])
+ if not self._check_published(toprepo, arch):
+ ready[arch] = False
+ print "%s is not ready yet, can't create a snapshot" %deprepo['DependsOn']
+ break
+
+ return ready, link
+
+ def _get_required_repos(self):
+
+ if 'RepoRequire' in self.reviewrepo:
+ return [self.repos_define.get_repo(r) for r in self.reviewrepo['RepoRequire']]
+
+ def _link_required_repos(self, target_dir, requried_repos_list):
+ """ Create a link to the latest repo and return with the release ID"""
+ # Known issue: required repos only can refer to the snapshots under same release_id
+ if not os.path.isdir(target_dir):
+ os.system("mkdir -p %s" %target_dir)
+
+ release_id = os.path.basename(os.path.realpath("%s/latest" %(requried_repos_list[0]['SnapshotDir'])))
+
+ for req_repo in requried_repos_list:
+ relative_dir = req_repo['Location'].replace(req_repo['TopLevel'], '')
+ if relative_dir.startswith('/'):
+ relative_dir = relative_dir[1:]
+ print "cp -al %s %s" %(os.path.join(req_repo['SnapshotDir'], release_id, relative_dir),
+ os.path.join(target_dir, os.path.basename(req_repo['Location'])))
+ os.system("cp -al %s %s" %(os.path.join(req_repo['SnapshotDir'], release_id, relative_dir),
+ os.path.join(target_dir, os.path.basename(req_repo['Location']))))
+ return release_id
+ def create(self):
+
+ print "========================================"
+ print " Createing repository snapshot: %s" %self.reviewrepo['Name']
+
+ required_repos = self._get_required_repos()
+ # Create links to required repos
+ if required_repos:
+ release_id = self._link_required_repos("%s/repos/" %(self.reviewrepo['TopLevel']), required_repos)
+
+ print " Addtional repo: %s" ','.join([r['Name'] for r in required_repos])
+ print " Addtional repo release id %s" %(release_id)
+ else:
+ release_id = NULL # FIXME
+
+ print "========================================"
+ print self.conf
+
+ repo = self.reviewrepo
+ prj = self.project
+ target = self.reviewrepo['Target']
+
+ if 'BaseDir' in repo:
+ builds_base = repo['BaseDir']
+ else:
+ builds_base = self.server_conf.builds
+
+ rname = repo['Name']
+
+ ready, link = self.is_ready()
+ status = True
+ for arch in repo['Architectures']:
+ if link[arch]:
+ print "Creating repo for %s %s arch: %s, " %(prj, target, arch),
+
+ if 'GpgKey' in repo:
+ gpgkey = repo['GpgKey']
+ else:
+ gpgkey = None
+
+ if 'SignUser' in repo:
+ signer = repo['SignUser']
+ else:
+ signer = None
+
+ lr = LinkRepo(self.server_conf.raw_repos, gpgkey, signer)
+
+ liverepo = {'prj': prj,
+ 'target': target,
+ 'arch': arch,
+ }
+
+ # support of 'Sandbox' repos and images for devel prjs
+ if repo['PartOf'] == 'sandbox':
+ brepo = self._get_repo(repo['SandboxOf'], target)
+ baserepo = {'prj': brepo['Project'],
+ 'target': brepo['Target'],
+ 'arch': arch,
+ }
+ else:
+ baserepo = None
+
+ # support for hidden binary rpms to be included in snapshot
+ if 'ExtraRpms' in repo:
+ extrarpms = repo['ExtraRpms']
+ else:
+ extrarpms = None
+
+ # whether to put obs project build conf to repodata dir
+ if 'ProjectConfig' in repo and repo['ProjectConfig']:
+ prjconf = self.bs.getProjectConfig(prj)
+ else:
+ prjconf = None
+
+ status = lr.linkrepo(self.server_conf.obs_triggers_path, liverepo, repo['Location'], baserepo, extrarpms, prjconf)
+ if not os.path.exists("%s/builddata/image-configs.xml" %(repo['Location'])) and status:
+ self.update_image_configs(repo['TopLevel'], "%s/ia32/packages" %repo['Location'])
+ print "result: %s" %( "Ok" if status else "Error")
+
+ if not status:
+ wi['snapshot'] = False
+ return None
+
+ Go = False
+ if ready['ia32']:
+ for rr in ready.keys():
+ if ready[rr]:
+ Go = True
+
+ if Go:
+
+ print "========================================"
+ print "Creating temporary repo, copying rpms "
+ sys.stdout.flush()
+
+ tmprepo = tempfile.mkdtemp(prefix='repomaker-', dir='/srv/snapshots/tmp/')
+ os.makedirs("%s/repos" %tmprepo, 0755)
+ os.system("cp -al %s/builddata %s" %(repo['TopLevel'], tmprepo))
+
+ for arch in repo['Architectures']:
+ if ready[arch]:
+ self.builddata.archs.append(arch)
+
+ for i in os.listdir("%s/repos" %repo['TopLevel']):
+ print "working on %s" %i
+ if not os.path.exists("%s/repos/%s" %(tmprepo,i)):
+ os.makedirs("%s/repos/%s" %(tmprepo,i), 0755)
+
+ # source repo
+ os.system("cp -al %s/repos/%s/source %s/repos/%s" %(repo['TopLevel'], i, tmprepo, i))
+
+ self.builddata.repos.append(i)
+
+ # arch specific repos
+ for arch in repo['Architectures']:
+ if ready[arch]:
+ os.system("cp -al %s/repos/%s/%s %s/repos/%s" %(repo['TopLevel'], i, arch, tmprepo, i))
+
+ # decide to put which project's build.conf under 'builddata'
+ prjconf = self._get_buildbase_conf(rname)
+ if prjconf:
+
+ import hashlib
+ prjconf_fn = "%s-build.conf" % hashlib.sha256(prjconf).hexdigest()
+
+ self.builddata.buildconf = prjconf_fn
+
+ # creating a snapshot is basically a copy of the daily build to
+ # a new location with a build ID.
+ # once snapshot is created, we can start image creation process
+ print "========================================"
+ print "We are ready to create a snapshot for %s (and all other related repos)" %rname
+ sys.stdout.flush()
+
+ assert('TopLevel' in repo and 'PartOf' in repo)
+
+ build_id = "%s.%s" %(release_id, repo['Release'])
+ self.builddata.id = build_id
+
+ if 'SnapshotDir' in repo:
+ top = repo['SnapshotDir']
+ else:
+ top = builds_base
+
+ if not os.path.isdir(top):
+ os.makedirs(top, 0755)
+
+ snapshotdir = "%s/%s" %(top, build_id)
+
+ print "linking %s to %s" %(tmprepo, snapshotdir)
+ os.system("cp -al %s %s" %(tmprepo, snapshotdir))
+ os.chmod(snapshotdir, 0755)
+
+ if 'Link' in repo:
+ # create symbolic links
+ if os.path.exists("%s/%s" %(top, repo['Link'])):
+ os.remove("%s/%s" %(top, repo['Link']))
+ print "Creating symlink %s -> %s/%s" %(snapshotdir, top, repo['Link'])
+ os.symlink(build_id, "%s/%s" %(top, repo['Link']))
+
+ try:
+ os.makedirs("%s/builddata" %(snapshotdir))
+ except:
+ pass
+
+ self.builddata.save_to_file("%s/builddata/build.xml" %(snapshotdir))
+
+ # Save OBS project building config here
+ if prjconf:
+ with open("%s/builddata/%s" % \
+ (snapshotdir, prjconf_fn), 'w') as wf:
+ wf.write(prjconf)
+
+ # to put buildlog of all packages under builddata
+ print "========================================"
+ print 'Copying all bulidlog to builddata dir ...'
+ sys.stdout.flush()
+
+ buildbase = self.server_conf.obs_building_path
+
+ database = "%s/builddata/buildlogs" % snapshotdir
+ fail_dir = os.path.join(database, 'failed')
+ succ_dir = os.path.join(database, 'succeeded')
+
+ os.makedirs(fail_dir)
+ os.makedirs(succ_dir)
+
+ for arch in repo['Architectures']:
+ builddir = os.path.join(buildbase,
+ repo['Project'],
+ repo['Target'],
+ ARCHES[arch])
+ for pkg in os.listdir(builddir):
+ if pkg.startswith(':'):
+ continue
+ pkgdir = os.path.join(builddir, pkg)
+ with file(os.path.join(pkgdir, 'status')) as f:
+ statusline = f.readline()
+ if 'status="succeeded"' in statusline:
+ logf = os.path.join(database, 'succeeded', pkg + '.buildlog.txt')
+ elif 'status="failed"' in statusline:
+ logf = os.path.join(database, 'failed', pkg + '.buildlog.txt')
+ else:
+ # ignore other status
+ logf = None
+
+ if logf:
+ os.system('cp %s %s' % (os.path.join(pkgdir, 'logfile'),
+ logf))
+
+ # cleanup tmp dir and links
+ shutil.rmtree(tmprepo)