Repomaker related code cleanups
author Lingchaox Xin <lingchaox.xin@intel.com>
Fri, 28 Jun 2013 09:01:48 +0000 (17:01 +0800)
committer Lingchaox Xin <lingchaox.xin@intel.com>
Mon, 1 Jul 2013 02:25:26 +0000 (10:25 +0800)
Change-Id: I4b8baceb14063997b1ca01246b5a650a8f10f6e7
Signed-off-by: Lingchaox Xin <lingchaox.xin@intel.com>
job_repomaker.py [deleted file]
repomaker/__init__.py [deleted file]
repomaker/email_templates/new_build [deleted file]
repomaker/email_templates/repo_published [deleted file]
repomaker/linkrepo.py [deleted file]
repomaker/release.py [deleted file]

diff --git a/job_repomaker.py b/job_repomaker.py
deleted file mode 100755 (executable)
index e74955b..0000000
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python
-
-from __future__ import with_statement
-import os, sys, io
-import tempfile
-import ConfigParser
-import optparse
-from string import Template
-import re
-import yaml
-try:
-    import json
-except ImportError:
-    import simplejson as json
-
-from common.tempbuildpkg import BuildService2
-from common.buildtrigger import trigger_info, trigger_next
-import repomaker
-import base64
-import time
-
-from images import generate_images_info, get_ks_list, set_relative_uri
-
-P_NAME = "repomaker"
-
-class ServerConf:
-
-    def __init__(self):
-        pass
-    def __getitem__(self, key):
-        return getattr(self, key)
-
-def is_intrested_project(project):
-    for keyword in os.getenv('REPOMAKER_PROJECTS', '').split(' '):
-        if keyword.endswith('*'):
-            if project.startswith(keyword[:-1]):
-                return True
-        else:
-            if project == keyword:
-                return True
-    return False
-
-def image_dispatch(repoinfo):
-
-    ready = repoinfo['SNAPSHOT']
-
-    release_build_id = repoinfo['release_build_id']
-    if release_build_id:
-        print 'snapshot:', str(ready), 'build_id', release_build_id
-
-    if ready and release_build_id:
-
-        if 'RELATIVE_URI' in repoinfo.keys():
-            set_relative_uri(repoinfo['RELATIVE_URI'])
-            images_path = repoinfo['DEST_DIR']
-
-        ks_list = get_ks_list(os.getenv('CONF_BASEURL', ''), release_build_id)
-        if ks_list:
-            images_info = generate_images_info ([(ksn, None) for ksn in ks_list],
-                            os.getenv('CONF_BASEURL', ''),
-                            release_build_id,
-                            os.getenv('MAILTO', ''),
-                            images_path,
-                            os.getenv('RSYNC_SERVER', ''))
-            print 'launched for:'
-            print '\n'.join(ks_list)
-            return images_info
-        else:
-            print 'Empty kickstart file list, skip!'
-
-def create_repo(serverconf, event_fields, repoinfo):
-    try:
-
-        bs = BuildService2(apiurl = serverconf.apiurl,
-                           apiuser = serverconf.apiuser,
-                           apipasswd = serverconf.apipasswd)
-    except Exception, e:
-        print 'OBS access errors: ', str(e)
-        sys.exit(-1)
-
-    repomgr = repomaker.RepoManager(serverconf, bs, repoinfo)
-
-    print "%s job running" % P_NAME
-
-    status = repomgr.create(event_fields['project'], event_fields['repo'])
-    return status
-
-if __name__ == "__main__":
-
-    print '---[JOB STARTED]----------------------------------------'
-    sys.stdout.flush()
-
-    server_conf = ServerConf()
-    server_conf.raw_repos = os.getenv('PATH_RAW_REPOS', '')
-    server_conf.repos_conf = os.getenv('PATH_REPO_CONF', '')
-    server_conf.oscrc = os.getenv('PATH_OSCRC', '')
-    server_conf.apiurl = os.getenv('OBS_API_URL', '')
-    server_conf.apiuser = os.getenv('OBS_API_USERNAME', '')
-    server_conf.apipasswd = os.getenv('OBS_API_PASSWD', '')
-    server_conf.basedir = os.getenv('PATH_BASEDIR', '')
-    server_conf.top_tmpdir = os.getenv('PATH_TOP_TMPDIR', '')
-    server_conf.obs_triggers_path = os.getenv('OBS_TRIGGERS_PATH') or '/srv/obs/repos_sync'
-    server_conf.obs_building_path = os.getenv('OBS_BUILDING_PATH') or '/srv/obs/build'
-    server_conf.mailto = filter(None, [s.strip() for s in \
-            os.getenv('MAILTO','').split(',')]) or []
-    server_conf.no_armv8 = (os.getenv('NO_ARMV8', '').lower()) == 'yes' or False
-    server_conf.sandbox_repo_baseurl = os.getenv('SANDBOX_REPO_BASEURL') or None
-    server_conf.email_templates_dir = os.getenv('EMAIL_TEMPLATES_DIR', '')
-    server_conf.workdir = os.getenv('WORKSPACE', '')
-    repo_conf_fn = None
-
-    event_fields = trigger_info(os.getenv('TRIGGER_INFO', ''))
-
-    if not event_fields:
-        print "Invalid OBS event: %s" %(OBS_EVENT_STRING)
-        sys.exit(-1)
-
-    project = event_fields["project"]
-    target = event_fields["repo"]
-    if not is_intrested_project(project):
-        print "Pass, not intrested project %s" %project
-        sys.exit(0)
-
-    repoinfo = {}
-    create_repo(server_conf, event_fields, repoinfo)
-
-    print "====LOG:REPO_MAKER:REPOINFO===="
-    import pprint
-    pp = pprint.PrettyPrinter(indent=4)
-    print pp.pprint(repoinfo)
-    print "====LOG:REPO_MAKER:REPOINFO======================END===="
-    if repoinfo:
-        image_info = image_dispatch(repoinfo)
-        if image_info:
-            for index, imagerinfo in enumerate(image_info):
-                trigger_next('image_trigger_%s' %(index), imagerinfo)
diff --git a/repomaker/__init__.py b/repomaker/__init__.py
deleted file mode 100644 (file)
index 46a1c09..0000000
+++ /dev/null
@@ -1,639 +0,0 @@
-#!/usr/bin/env python
-
-#    Copyright (c) 2009 Intel Corporation
-#
-#    This program is free software; you can redistribute it and/or modify it
-#    under the terms of the GNU General Public License as published by the Free
-#    Software Foundation; version 2 of the License
-#
-#    This program is distributed in the hope that it will be useful, but
-#    WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
-#    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
-#    for more details.
-#
-#    You should have received a copy of the GNU General Public License along
-#    with this program; if not, write to the Free Software Foundation, Inc., 59
-#    Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-
-try:
-    import json
-except ImportError:
-    import simplejson as json
-
-from linkrepo import LinkRepo
-import os, sys
-import re
-import time
-from datetime import datetime
-import tempfile
-import stat
-import shutil
-import xml.etree.ElementTree as ET
-import yaml
-
-from xml.dom import minidom
-
-OBS_ARCHES =     {'ia32': 'i586', 'armv7l': 'armv7el', 'armv7hl': 'armv8el'}
-# arch names mapping: 'obs-repo-arch': 'snapshot-repo-arch'
-# (with OBS_ARCHES key-value reversed)
-SNAP_ARCHES = dict((snap, obs) for obs, snap in OBS_ARCHES.iteritems())
-
-def get_toplevel_dirs_from_repoconf(confp, temp_basedir='/srv/builds/temp'):
-    """Helper function to extract the toplevel paths from repo configs
-    """
-    paths = set()
-    if os.path.isfile(confp):
-        repos = yaml.load(file(confp))['Repositories']
-        for repo in repos:
-            if 'TopLevel' in repo:
-                paths.add(os.path.dirname(repo['TopLevel'].rstrip('/')))
-            elif 'PartOf' in repo and 'Branch' in repo:
-                top_level = TOP_LEVEL_FORMAT %(temp_basedir, repo['Branch'], repo['PartOf'])
-                paths.add(os.path.dirname(top_level))
-
-    return paths
-
-class BuildData:
-    # fixing of buggy xml.dom.minidom.toprettyxml
-    XMLTEXT_RE = re.compile('>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
-
-    def __init__(self):
-        self.archs = []
-        self.repos = []
-
-    def __setitem__(self, key, value):
-        return setattr(self, key, value)
-
-    def save_to_file(self, path_to_file):
-
-        impl = minidom.getDOMImplementation()
-
-        dom = impl.createDocument(None, "build", None)
-        root = dom.documentElement
-
-        archs = dom.createElement('archs')
-        root.appendChild(archs)
-
-        for arch in self.archs:
-            ele = dom.createElement('arch')
-            ele.appendChild(dom.createTextNode(arch))
-            archs.appendChild(ele)
-
-        repos = dom.createElement('repos')
-        root.appendChild(repos)        
-
-        for arch in self.repos:
-            ele = dom.createElement('repo')
-            ele.appendChild(dom.createTextNode(arch))
-            repos.appendChild(ele)
-
-        other_keys = self.__dict__.keys()
-        other_keys.remove('archs')
-        other_keys.remove('repos')
-
-        for attr in other_keys:
-            if hasattr(self, attr):
-                ele = dom.createElement(attr.lower())
-                ele.appendChild(dom.createTextNode(getattr(self,attr)))
-                root.appendChild(ele)
-
-        builddate_xml = open(path_to_file, 'w')
-        builddate_xml.write(self.XMLTEXT_RE.sub('>\g<1></', dom.toprettyxml(indent="  ")))
-        builddate_xml.close()
-
-class InternalWorkitem:
-    def __init__(self):
-        self.forwarding_fields = ['dest_dir',
-                                  'relative_uri',
-                                  'rsync_server',
-                                  'last_build_id',
-                                  'To',
-                                  'snapshotdir',
-                                 ]
-
-    def __setitem__(self, key, value):
-        return setattr(self, key, value)
-
-    def __getitem__(self, key):
-        return getattr(self, key)
-
-    def update_fields(self, target):
-        if isinstance(target, dict):
-            for key, value in self.__dict__.iteritems():
-                if value == '':
-                    print "Warning:%s is empty!" %key
-                target[key.upper()] = value
-            target['release_build_id'] = self.release_build_id
-            return True
-        elif 'set_field' in dir(target):
-            for key, value in self.__dict__.iteritems():
-                if value == '':
-                    print "Warning:%s is empty!" %key
-                target.set_field(key, value)
-            return True
-        else:
-            return False
-
-class RepoConf:
-    """ Class as wrapper of repos.yaml """
-
-    def __init__(self, repos_conf_file, default_basedir, temp_basedir):
-        self.default_basedir = default_basedir
-        self.tmpdir = temp_basedir
-
-        if os.path.isfile(repos_conf_file):
-            self.repos = yaml.load(file(repos_conf_file))['Repositories']
-        else:
-            raise Exception, "Fatal: Invalid repo configuration file: %s"  %(repos_conf_file)
-
-    def _compose_keys(self, repo):
-        if 'PartOf' in repo and 'Branch' in repo and 'RepoName' in repo:
-            if 'BaseDir' not in repo:
-                repo['BaseDir'] = self.default_basedir
-
-            repo['snapshotbase'] = "%s/%s/%s" %(repo['BaseDir'],
-                                                repo['Branch'],
-                                                repo['PartOf'])
-            repo['toplevel'] = "%s/cache/%s-%s" %(self.tmpdir,
-                                                  repo['Branch'],
-                                                  repo['PartOf'])
-            repo['location'] = "%s/repos/%s" %(repo['toplevel'],
-                                               repo['RepoName'])
-        else:
-            print 'Warning: skip invalid repo definition:', repo['Name']
-            return False
-
-        if 'OBS_Architectures' in repo:
-            repo['archs'] = repo['OBS_Architectures']
-        else:
-            repo['archs'] = ['ia32']
-
-        return True
-
-    def get_repo(self, prj, target="standard"):
-        repo = None
-        for r in self.repos:
-            if 'OBS_Project' not in r or 'OBS_Target' not in r:
-                continue
-
-            if r['OBS_Project'] == prj and r['OBS_Target'] == target:
-                repo = r
-                if not self._compose_keys(repo):
-                    continue
-
-                break
-
-        return repo
-
-    def get_repo_by_name(self, name):
-        repo = None
-        for r in self.repos:
-            if 'OBS_Project' not in r or 'OBS_Target' not in r:
-                continue
-
-            if r['Name'] == name:
-                repo = r
-                repo['targets'] = []
-
-                if not self._compose_keys(repo):
-                    continue
-
-                for a in repo['archs']:
-                    try:
-                        arch = OBS_ARCHES[a]
-
-                        if a == 'armv7hl' and self.no_armv8:
-                            arch = a
-                        repo['targets'].append(('/'.join((r['OBS_Target'] , arch))))
-                    except KeyError:
-                        print "Unspecified arch %s" % a
-
-                break
-
-        return repo
-
-    def save_to_file(self, path_to_file, ext = None):
-
-        stream = file(path_to_file, 'w')
-        if ext:
-            self.repos.append(ext)
-        yaml.dump({"Repositories" : self.repos}, stream)
-
-class RepoManager:
-    def __init__(self, myconf, bs = None, workitem = None):
-        self.conf = myconf
-        self.bs = bs
-        self.wi = workitem
-        self.wi_fields = InternalWorkitem()
-        self.rc = RepoConf(myconf['repos_conf'], myconf['basedir'], myconf['top_tmpdir'])
-
-        if hasattr(myconf, 'no_armv8') and myconf['no_armv8']:
-            self.no_armv8 = True
-        else:
-            self.no_armv8 = False
-
-    def _read_template(self, name):
-        fp = os.path.join(self.conf['email_templates_dir'], name)
-        try:
-            with file(fp) as f:
-                return f.read()
-        except:
-            return ''
-
-    def _check_published(self, repo, arch = None):
-        prj = repo['OBS_Project']
-        state = self.bs.get_repo_state(prj)
-        print "state for %s (%s): %s" %(prj, arch, state)
-        ret = True
-        for t in repo['targets']:
-            (target, architecture) = t.split("/")
-            if architecture in SNAP_ARCHES:
-                repo_arch = SNAP_ARCHES[architecture]
-            else:
-                repo_arch = architecture
-
-            if t in state and arch == repo_arch:
-                print "> %s %s is %s" %(prj, t, state[t])
-                count = 1
-                while ( state[t] == 'publishing' or \
-                        state[t] == 'finished' or \
-                        state[t] == 'scheduling' ) and count < 100:
-                    print "repo %s status is %s, waiting..." % (t, state[t])
-                    sys.stdout.flush()
-
-                    time.sleep(5)
-                    state = self.bs.get_repo_state(prj)
-                    count = count + 1
-
-                if state[t] != 'published':
-                    print ">> %s %s is still in '%s', skip" %(prj, t, state[t])
-                    ret = False
-                    break
-
-        return ret
-
-    def _update_image_configs(self, dest_repo, dest_pkg):
-        """ Checking for patterns at: rawrepo/repodata/{group.xml,patterns.xml}.gz
-        """
-        import re
-
-        pg = re.compile("image-configurations.*.rpm")
-        found = False
-        for root, dirs, files in os.walk("%s" %dest_pkg):
-            for f in files:
-                if re.match(pg, f):
-                    print "image configurations found: %s/%s" %(root,f)
-                    tmpdir = tempfile.mkdtemp()
-                    pwd = os.getcwd()
-                    os.chdir(tmpdir)
-                    os.system("rpm2cpio %s/%s | cpio -idmv" %(root,f))
-                    os.system("mkdir -p %s/builddata" %dest_repo)
-                    os.system("rm %s/builddata/image-configs/*.ks" %(dest_repo))
-                    os.system("mkdir -p %s/builddata/image-configs" %dest_repo)
-                    if os.path.exists("%s/usr/share/image-configurations/image-configs.xml" %tmpdir):
-                        shutil.copyfile("%s/usr/share/image-configurations/image-configs.xml" %tmpdir, '%s/builddata/image-configs.xml' %dest_repo)
-                    os.system("cp %s/usr/share/image-configurations/*.ks %s/builddata/image-configs" %(tmpdir, dest_repo))
-                    os.chdir(pwd)
-                    shutil.rmtree(tmpdir)
-                    found = True
-                    break
-
-            if found: break
-
-        if not found:
-            print "did not find any image configurations"
-
-    def _check_ready(self, ready = {'ia32': True}):
-        # for ARM repo, ia32 must be ready
-        if ready['ia32']:
-
-            # for x86_64, 32bit and 64bit must be generated together
-            if 'x86_64' in ready and not ready['x86_64']:
-                ready['ia32'] = False
-
-            for rr in ready.keys():
-                if ready[rr]:
-                    return True
-
-        return False
-
-    def _get_buildbase_conf(self, rname):
-        """Firstly find out the toplevel BASE repo of it, then fetch
-          the config and return.
-        """
-
-        brepo = self.rc.get_repo_by_name(rname)
-        while brepo and 'DependsOn' in brepo:
-            brepo = self.rc.get_repo_by_name(brepo['DependsOn'])
-        # now brepo should be the toplevel BASE repo or itself
-
-        if not brepo:
-            return None
-        else:
-            return self.bs.get_project_config(brepo['OBS_Project'])
-
-    def _save_project_config(self, rname, snapshotdir):
-        """ save project config to snapshot dir, return the prjconf name
-        """
-        # decide to put which project's build.conf under 'builddata'
-        prjconf = self._get_buildbase_conf(rname)
-        if prjconf:
-            import hashlib
-            prjconf_fn = "%s-build.conf" % hashlib.sha256(prjconf).hexdigest()
-            # Save OBS project building config here
-            with open("%s/builddata/%s" % \
-                        (snapshotdir, prjconf_fn), 'w') as wf:
-                wf.write(prjconf)
-            return prjconf_fn
-
-        return ''
-
-    def create(self, prj = None, target = None):
-
-        if not prj:
-            prj = self._get_param('project')
-        if not target:
-            target = self._get_param('target')
-
-        if not prj or not target:
-            print '"project" and "target" must be specified in params'
-            return False
-
-        repo = self.rc.get_repo(prj, target)
-        if not repo:
-            print "No repos meta in %s for %s/%s" % (self.conf['repos_conf'], prj, target)
-            return False
-
-        rname = repo['Name']
-        this_repo = self.rc.get_repo_by_name(rname)
-
-        if 'DependsOn' in repo:
-            top_repo = self.rc.get_repo_by_name(repo['DependsOn'])
-
-        if 'SandboxOf' in repo:
-            sandbox_repo = self.rc.get_repo_by_name(repo['SandboxOf'])
-
-        print "=========================="
-        print datetime.today()
-        print "Repo: %s" %repo['Name']
-        print "=========================="
-        nowtime = time.time()
-
-        basedir = repo['BaseDir']
-
-        # see if all related repos are published and prepare for a lockdown and a snapshot
-        ready = {'ia32': True}
-
-        link = {}
-        for arch in repo['archs']:
-            print "Checking status of %s" %arch
-            ready[arch] = True
-            link[arch] = True
-            ret = self._check_published(this_repo, arch)
-            print "ret: %s" %ret
-            if not ret:
-                print "%s is not ready yet, can't create a snapshot" %rname
-                ready[arch] = False
-                link[arch] = False
-                continue
-
-            if 'DependsOn' in repo:
-                if self._check_published(top_repo, arch):
-                    print '%s depends on %s which is published' %(rname, repo['DependsOn'] )
-                    if 'Dependents' in top_repo:
-                        for d in top_repo['Dependents']:
-                            deprepo = self.rc.get_repo_by_name(d)
-                            if not self._check_published(deprepo, arch):
-                                ready[arch] = False
-                                print "%s is not ready yet, can't create a snapshot" %d
-                                break
-                else:
-                    print '%s depends on %s which is not published yet' %(rname, repo['DependsOn'] )
-                    ready[arch] = False
-
-            elif 'Dependents' in repo:
-                for d in repo['Dependents']:
-                    deprepo = self.rc.get_repo_by_name(d)
-                    if not self._check_published(deprepo, arch):
-                        ready[arch] = False
-                        print "%s is not ready yet, can't create a snapshot" %d
-                        break
-
-                    if 'DependsOn' in deprepo and rname in deprepo['DependsOn']:
-                        if not self._check_published(top_repo, arch):
-                            ready[arch] = False
-                            print "%s is not ready yet, can't create a snapshot" %deprepo['DependsOn']
-                            break
-
-        status = True
-        for arch in repo['archs']:
-            if link[arch]:
-                print "Creating repo for %s %s arch: %s, " %(prj, target, arch),
-
-                if 'GpgKey' in repo:
-                    gpgkey = repo['GpgKey']
-                else:
-                    gpgkey = None
-
-                if 'SignUser' in repo:
-                    signer = repo['SignUser']
-                else:
-                    signer = None
-
-                lr = LinkRepo(self.conf['raw_repos'], gpgkey, signer)
-
-                liverepo = {'prj': prj,
-                            'target': target,
-                            'arch': arch,
-                           }
-
-                # support of 'SandboxOf' repos and images for staging prjs
-                baserepo = None
-                if 'SandboxOf' in repo:
-                    if sandbox_repo:
-                        baserepo = {'prj': sandbox_repo['OBS_Project'],
-                                    'target': sandbox_repo['OBS_Target'],
-                                    'arch': arch,}
-                    else:
-                        print 'Invalid SandboxOf repo settings for %s' % prj
-
-
-                # support for hidden binary rpms to be included in snapshot
-                if 'ExtraRpms' in repo:
-                    extrarpms = repo['ExtraRpms']
-                else:
-                    extrarpms = None
-
-                # whether to put obs project build conf to repodata dir
-                if 'ProjectConfig' in repo and repo['ProjectConfig']:
-                    prjconf = self.bs.get_project_config(prj)
-                else:
-                    prjconf = None
-
-                status = lr.linkrepo(self.conf['obs_triggers_path'], liverepo, repo['location'], baserepo, extrarpms, prjconf)
-                if not os.path.exists("%s/builddata/image-configs.xml" %(repo['location'])) and status:
-                    self._update_image_configs(repo['toplevel'], "%s/ia32/packages" %repo['location'])
-
-                print "snapshot result: %s" %( "Ok" if status else "Error")
-
-                if not status:
-                    self.wi_fields['snapshot'] = False
-                    return False
-
-        if self._check_ready(ready):
-            tmprepo = tempfile.mkdtemp(prefix='repomaker-',
-                                       dir=os.path.join(self.conf['top_tmpdir'], 'repo'))
-
-            os.makedirs("%s/repos" %tmprepo, 0755)
-            os.system("cp -al %s/builddata %s" %(repo['toplevel'], tmprepo))
-
-            builddata = BuildData()
-
-            for arch in repo['archs']:
-                if ready[arch]:
-                    builddata.archs.append(arch)
-
-            for i in os.listdir("%s/repos" %repo['toplevel']):
-                print "working on %s" %i
-                if not os.path.exists("%s/repos/%s" %(tmprepo,i)):
-                    os.makedirs("%s/repos/%s" %(tmprepo,i), 0755)
-
-                # source repo
-                os.system("cp -al %s/repos/%s/source %s/repos/%s" %(repo['toplevel'], i, tmprepo, i))
-                builddata.repos.append(i)
-
-                # arch specific repos
-                for arch in repo['archs']:
-                    if ready[arch]:
-                        os.system("cp -al %s/repos/%s/%s %s/repos/%s" %(repo['toplevel'], i, arch, tmprepo, i))
-
-            # creating a snapshot is basically a copy of the daily build to
-            # a new location with a build ID.
-            # once snapshot is created, we can start image creation process
-            print "We are ready to create a snapshot for %s (and all other related repos)" %rname
-
-            from release import BuildID
-            bid = BuildID()
-            last_id, build_id = bid.get_new_build_id(release=repo['Prefix'], partof=repo['PartOf'])
-            builddata.id = build_id
-
-            top = repo['snapshotbase']
-            if not os.path.isdir(top):
-                os.makedirs(top, 0755)
-
-            snapshotdir = "%s/%s" %(top, build_id)
-
-            print "linking %s to %s" %(tmprepo, snapshotdir)
-            os.system("cp -al %s %s" %(tmprepo, snapshotdir))
-            os.chmod(snapshotdir, 0755)
-
-            if 'Link' in repo:
-                # create symbolic links
-                if os.path.exists("%s/%s" %(top, repo['Link'])):
-                    os.remove("%s/%s" %(top, repo['Link']))
-                print "Creating symlink %s -> %s/%s" %(snapshotdir, top, repo['Link'])
-                os.symlink(build_id, "%s/%s" %(top, repo['Link']))
-
-            self.wi_fields['release_build_id'] = build_id
-            self.wi_fields['last_build_id'] = last_id
-            self.wi_fields['snapshot'] = True
-            self.wi_fields['template_str'] = self._read_template('new_build')
-            self.wi_fields['snapshot_subject'] = 'Snapshot %s created' %(build_id)
-
-            # make sure 'builddata' dir exists even no 'image-configurations.rpm'
-            try:
-                os.makedirs("%s/builddata" % snapshotdir)
-            except OSError:
-                pass
-
-            prjconf_fn = self._save_project_config(rname, snapshotdir)
-            if prjconf_fn:
-                builddata.buildconf = prjconf_fn
-
-            builddata.save_to_file("%s/builddata/build.xml" %(snapshotdir))
-
-            # to put buildlog of all packages under builddata
-            print 'Copying all bulidlog to builddata dir ...'
-            sys.stdout.flush()
-
-            buildbase = self.conf['obs_building_path']
-
-            database = "%s/builddata/buildlogs" % snapshotdir
-            fail_dir = os.path.join(database, 'failed')
-            succ_dir = os.path.join(database, 'succeeded')
-            os.makedirs(fail_dir)
-            os.makedirs(succ_dir)
-
-            for arch in repo['archs']:
-                builddir = os.path.join(buildbase,
-                                        repo['OBS_Project'],
-                                        repo['OBS_Target'],
-                                        OBS_ARCHES[arch])
-                for pkg in os.listdir(builddir):
-                    if pkg.startswith(':'):
-                        continue
-                    pkgdir = os.path.join(builddir, pkg)
-                    try:
-                        with file(os.path.join(pkgdir, 'status')) as f:
-                            statusline = f.readline()
-                    except:
-                        continue
-
-                    if 'status="succeeded"' in statusline:
-                        logf = os.path.join(database, 'succeeded', pkg + '.buildlog.txt')
-                    elif 'status="failed"' in statusline:
-                        logf = os.path.join(database, 'failed', pkg + '.buildlog.txt')
-                    else:
-                        # ignore other status
-                        logf = None
-
-                    if logf:
-                        os.system('cp %s %s' % (os.path.join(pkgdir, 'logfile'),
-                                                logf))
-
-            # cleanup tmp dir and links
-            shutil.rmtree(tmprepo)
-
-        if status:
-            """
-            # TODO: config to enable/disable rsync
-            print "Running rsync to public repo server"
-            os.system("rsync -H --delete-after -avz /srv/www/vhosts/repo.abc.com/* rsync://repo.abc.com/external/")
-            print "Done"
-            """
-
-            # for mails
-            self.wi_fields['template'] = 'repo_published'
-            self.wi_fields['subject'] = 'Repo for %s/%s published' %(prj, target)
-            # if 'MailTo' is set in repo conf, then send emails to this addr
-            if 'MailTo' in repo:
-                mailto = repo['MailTo']
-                if not isinstance(mailto, list):
-                    mailto = filter(None, [s.strip() for s in mailto.split(',')])
-                self.wi_fields['To'] = mailto
-            else:
-                self.wi_fields['To'] = self.conf['mailto']
-
-            self.wi_fields['location'] = repo['location']
-            self.wi_fields['target'] = target
-            pubarch = [ rr for rr in ready.keys() if ready[rr] ]
-            self.wi_fields['archs'] = pubarch
-
-            # special rsync server for this repo
-            if 'RsyncServer' in repo:
-                self.wi_fields['rsync_server'] = repo['RsyncServer']
-
-            # special internal used baseurl of repos
-            if 'RepoBaseUrl' in repo:
-                self.wi_fields['conf_baseurl'] = repo['RepoBaseUrl']
-            else:
-                self.wi_fields['conf_baseurl'] = self.conf['repo_baseurl']
-
-            # send it to image-dispatcher TODO 'dest_dir' and 'snapshotdir' duplicated, remove one
-            self.wi_fields['dest_dir'] = self.wi_fields['snapshotdir'] = repo['snapshotbase']
-            self.wi_fields['relative_uri'] = "%s/%s" %(repo['Branch'], repo['PartOf'])
-
-            return self.wi_fields.update_fields(self.wi)
-        else:
-            return False
-
diff --git a/repomaker/email_templates/new_build b/repomaker/email_templates/new_build
deleted file mode 100644 (file)
index 580f0ff..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-Hi,
-
-A new snapshot ${release_build_id} was published and the images are going to be
-generated.
-
-
-Regards,
-Release Engineering Team
-
-[This message was auto-generated]
-
diff --git a/repomaker/email_templates/repo_published b/repomaker/email_templates/repo_published
deleted file mode 100644 (file)
index 046653a..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-Hi,
-
-The repo for ${project}/${target} was published and is available from
-${url}
-
-
-Difference to last reposiotry:
-
-${diff}
-
-Regards,
-Release Engineering Team
-
-[This message was auto-generated]
-
diff --git a/repomaker/linkrepo.py b/repomaker/linkrepo.py
deleted file mode 100644 (file)
index 9e67a3a..0000000
+++ /dev/null
@@ -1,380 +0,0 @@
-#!/usr/bin/env python
-
-#    Copyright (c) 2009 Intel Corporation
-#
-#    This program is free software; you can redistribute it and/or modify it
-#    under the terms of the GNU General Public License as published by the Free
-#    Software Foundation; version 2 of the License
-#
-#    This program is distributed in the hope that it will be useful, but
-#    WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
-#    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
-#    for more details.
-#
-#    You should have received a copy of the GNU General Public License along
-#    with this program; if not, write to the Free Software Foundation, Inc., 59
-#    Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-
-import os,sys
-import optparse
-import glob
-import shutil
-import stat
-import tempfile
-import re
-import rpm
-import subprocess
-
-pjoin = os.path.join # shortcut
-
-SIGNER = "/usr/bin/sign"
-
-class LinkRepo():
-
-    def __init__(self, live, gpg_key=None, sign_user=None):
-        self.live = live
-
-        if gpg_key and os.path.isfile(self.gpg_key):
-            self.gpg_key=gpg_key
-        else:
-            self.gpg_key = None
-
-        if os.path.exists(SIGNER) and os.access(SIGNER, os.X_OK):
-            if sign_user:
-                self.sign_cmd=SIGNER + ' -u %s' % sign_user
-            else:
-                self.sign_cmd=SIGNER
-
-        else:
-            print 'Cant access signer, repos will not be signed!!!'
-            self.sign_cmd = None
-
-    def _rm_files(self, wild_path):
-        for fn in glob.glob(wild_path):
-            os.remove(fn)
-
-    def copy_missing(self,pkg1, pkg2, dest):
-      def get_list(repo):
-        out = {}
-        for p in os.listdir(repo):
-          pac = p.rpartition("-")[0].rpartition("-")[0]
-          if pac is not None:
-            out[pac] = p
-        return out
-
-
-      a = get_list(pkg1)
-      b = get_list(pkg2)
-
-      x = set(a.keys())
-      y= set(b.keys())
-      for c in x - y:
-        print "%s" %(dest)
-        #shutil.copyfile("%s/%s" %(pkg1, a[c]), "%s/%s" %(dest, a[c]))
-        #print "cp %s/%s %s/%s" %(pkg1, a[c], dest, a[c])
-        os.system("cp %s/%s %s/%s" %(pkg1, a[c], dest, a[c]))
-
-    def _rpmname_from_file(self, path):
-        """Get rpm package name from the path of rpm file"""
-
-        if not path.endswith('.rpm'):
-            return None
-
-        ts = rpm.ts()
-        ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
-        fdno = os.open(path, os.O_RDONLY)
-        hdr = ts.hdrFromFdno(fdno)
-        os.close(fdno)
-
-        return hdr[rpm.RPMTAG_NAME]
-
-    def _rpms_from_dir(self, dpath):
-        """Get set of rpm names from dir which contains rpm files"""
-        return filter(None, map(self._rpmname_from_file, glob.glob(pjoin(dpath, '*.rpm'))))
-
-    def _ln_files(self, wild_path, dst, ignore_exists=False):
-        if ignore_exists:
-            exist_rpms = self._rpms_from_dir(dst)
-
-        for fn in glob.glob(wild_path):
-            if not ignore_exists or \
-               self._rpmname_from_file(fn) not in exist_rpms:
-                try:
-                    os.link(fn, pjoin(dst, os.path.basename(fn)))
-                except OSError:
-                    pass
-
-    def snapshot(self, prjraw, arch, dest, dest_pkg, dest_debug, dest_src, ignore_exists=False):
-        """Link rpm files from src path to destin path to make a snapshot.
-            `ignore_exists`: means whether to overwrite the existing
-                             file for the same rpm package.
-        """
-
-        print "creating snapshot from live repo: %s" %prjraw
-        # 'ia32' as the default arch
-        srpm_dir = 'src'
-        drpm_dir = 'i*'
-        bins = "i386 i586 i686 noarch"
-        xsrpm_dir = "src.armv8el"
-        if arch.startswith('armv7l'):
-            #srpm_dir += '.armv7el'
-            drpm_dir = 'armv7l'
-            bins = "armv7l noarch noarch.armv7el"
-        elif arch.startswith('armv7hl'):
-            #srpm_dir += '.armv8el'
-            drpm_dir = 'armv7hl'
-            bins = "armv7hl noarch noarch.armv8el"
-
-        srpm_glob = pjoin(prjraw, srpm_dir, '*src.rpm')
-        drpm_glob = pjoin(prjraw, drpm_dir, '*debuginfo*.rpm')
-        dsrpm_glob = pjoin(prjraw, drpm_dir, '*debugsource*.rpm')
-
-        # snapshot src and debuginfo
-        self._ln_files(srpm_glob, dest_src, ignore_exists)
-        self._ln_files(drpm_glob, dest_debug, ignore_exists)
-        self._ln_files(dsrpm_glob, dest_debug, ignore_exists)
-
-        # copy missing arm packages
-        #print "copy missing src packages from %s to %s" %(pjoin(prjraw, srpm_dir), dest_src)
-        if os.path.exists(pjoin(prjraw, xsrpm_dir)) and srpm_dir == "src":
-          self.copy_missing(pjoin(prjraw, xsrpm_dir), pjoin(prjraw, "src"),  dest_src)
-
-        for bin in bins.split():
-            if os.path.isdir(pjoin(prjraw, bin)):
-                if bin.startswith('noarch.arm'):
-                    tbin = 'noarch'
-                else:
-                    tbin = bin
-
-                if not os.path.exists(pjoin(dest_pkg, tbin)):
-                    os.makedirs(pjoin(dest_pkg, tbin))
-                self._ln_files(pjoin(prjraw, bin, '*.%s.rpm' % tbin),
-                               pjoin(dest_pkg, tbin),
-                               ignore_exists)
-
-                self._rm_files(pjoin(dest_pkg, bin, '*debuginfo*rpm'))
-                self._rm_files(pjoin(dest_pkg, bin, '*debugsource*rpm'))
-
-        if os.path.isdir(pjoin(dest, arch, 'packages', 'repodata')):
-            self._ln_files(pjoin(dest, arch, 'debug', 'repodata', '*'), pjoin(dest_debug, 'repodata'))
-            self._ln_files(pjoin(dest, arch, 'packages', 'repodata', '*'), pjoin(dest_pkg, 'repodata'))
-            self._ln_files(pjoin(dest, 'source', 'repodata', '*'), pjoin(dest_src, 'repodata'))
-
-        return True
-
-    def sign(self, path):
-        if self.sign_cmd and self.gpg_key:
-            os.system('%s -d %s/repodata/repomd.xml' % (self.sign_cmd, path))
-            shutil.copyfile(self.gpg_key, '%s/repodata/repomd.xml.key' % path)
-        else:
-            print 'Not signing repos: %s %s' %(self.sign_cmd, self.gpg_key)
-
-    def createrepo(self, dest_pkg, dest_src, dest_debug):
-        """ Call external command 'createrepo' to generate repodata,
-            and sign the repomd.xml if needed
-        """
-        if os.path.exists('%s/repodata/repomd.xml.asc' %dest_pkg):
-            os.system('rm %s/repodata/repomd.xml.asc' %dest_debug)
-            os.system('rm %s/repodata/repomd.xml.asc' %dest_src)
-            os.system('rm %s/repodata/repomd.xml.asc' %dest_pkg)
-
-
-        # use subprocess to make the createrepo non-blocking
-        # debug
-        debug_repo = subprocess.Popen('createrepo --quiet -d --changelog-limit=1 --update %s' % dest_debug, shell=True)
-        # source
-        src_repo = subprocess.Popen('createrepo --quiet -d --changelog-limit=10 --update %s' % dest_src, shell=True)
-
-        self._rm_files(pjoin(dest_pkg, 'repodata', '*comps*'))
-        self._rm_files(pjoin(dest_pkg, 'repodata', '*patterns*'))
-        self._rm_files(pjoin(dest_pkg, 'repodata', '*group*'))
-        self._rm_files(pjoin(dest_pkg, 'repodata', '*image-config*'))
-        # packages
-        pkg_repo = subprocess.Popen('createrepo --quiet --unique-md-filenames -d --changelog-limit=5 --update %s' % dest_pkg, shell=True)
-
-        debug_repo.wait()
-        src_repo.wait()
-        pkg_repo.wait()
-        self.sign(dest_debug)
-        self.sign(dest_src)
-        self.sign(dest_pkg)
-
-    def update_package_groups( self, dest_pkg):
-        """ Checking for patterns at: rawrepo/repodata/{group.xml,patterns.xml}.gz
-        """
-        pg = re.compile("package-groups-.*.rpm")
-        found = False
-        for root, dirs, files in os.walk("%s" %dest_pkg):
-            for f in files:
-                if re.match(pg, f):
-                    print "package groups found: %s/%s" %(root,f)
-                    tmpdir = tempfile.mkdtemp()
-                    pwd = os.getcwd()
-                    os.chdir(tmpdir)
-                    os.system("rpm2cpio %s/%s | cpio -ivd ./usr/share/package-groups/patterns.xml" %(root,f))
-                    os.system("rpm2cpio %s/%s | cpio -ivd ./usr/share/package-groups/group.xml" %(root,f))
-                    shutil.copyfile("%s/usr/share/package-groups/patterns.xml" %tmpdir, '%s/repodata/patterns.xml' %dest_pkg)
-                    os.system('modifyrepo %s/repodata/patterns.xml %s/repodata' %(dest_pkg, dest_pkg))
-                    shutil.copyfile("%s/usr/share/package-groups/group.xml" %tmpdir, '%s/repodata/group.xml' %dest_pkg)
-                    os.system('modifyrepo %s/repodata/group.xml %s/repodata' %(dest_pkg, dest_pkg))
-                    self.sign(dest_pkg)
-                    os.chdir(pwd)
-                    shutil.rmtree(tmpdir)
-                    found = True
-
-        if not found:
-            print "did not find any package groups"
-
-    def update_pattern(self, prjraw, dest_pkg, meta):
-        """ Checking for patterns at: rawrepo/repodata/{group.xml,patterns.xml}.gz
-        """
-        # find out the pattern file name from repomd.xml if any
-        print "Checking for patterns at: %s/repodata/%s.gz" %( prjraw, meta )
-        pattern = None
-        with open('%s/repodata/repomd.xml' % prjraw) as f:
-            patf_re = re.compile("(repodata/.*%s.gz)" %meta)
-            for ln in f:
-                m = patf_re.search(ln)
-                if m:
-                    pattern = m.group(1)
-                    break
-
-        if pattern:
-            shutil.copyfile(pjoin(prjraw, pattern), '%s/repodata/%s.gz' %( dest_pkg, meta) )
-
-            pwd = os.path.abspath(os.curdir)
-            os.chdir(pjoin(dest_pkg, 'repodata'))
-            os.system('gunzip -f %s.gz' %meta)
-            os.chdir(pwd)
-
-            os.system('modifyrepo %s/repodata/%s %s/repodata' %(dest_pkg, meta, dest_pkg))
-        else:
-            print 'No %s found' %meta
-
-    def cleanup(self, dest, dest_tmp, arch):
-        """ cleanup old repo and move new ones to the final places,
-            then cleanup the tmp dirs
-        """
-
-        dest_src = pjoin(dest_tmp, 'source')
-
-        shutil.rmtree(pjoin(dest, arch), ignore_errors = True)
-        shutil.move(pjoin(dest_tmp, arch), dest)
-        if not arch.startswith('arm'):
-            shutil.rmtree(pjoin(dest, 'source'), ignore_errors = True)
-            shutil.move(dest_src, dest)
-
-        shutil.rmtree(dest_tmp, ignore_errors = True)
-
-    def save_project_config(self, dest, prjconf):
-        fpath = os.path.join(dest, 'repodata', 'build.conf')
-        with open(fpath, 'w') as wf:
-            wf.write(prjconf)
-
-    def fix_perm(self, dest):
-        """ Remove all the WOTH permissions except symlink
-        """
-        for r, ds, fs in os.walk(dest):
-            for f in fs + ds:
-                fp = pjoin(r, f)
-                if os.path.islink(fp):
-                    continue
-
-                statinfo = os.stat(fp)
-                if statinfo.st_mode & stat.S_IWOTH:
-                    os.chmod(fp, statinfo.st_mode & ~stat.S_IWOTH)
-
-    def linkrepo(self, trigger_dir, liverepo, dest_repo,
-                 baserepo=None, extrarpms=None, prjconf=None):
-
-        def _get_rawrepo(repo):
-            prj = repo['prj']
-            target = repo['target']
-
-            if os.path.exists("%s/%s" %( trigger_dir, prj) ):
-                f = open("%s/%s" %( trigger_dir, prj), "r")
-                p = f.readline()
-                x = p.rstrip("\x00")
-            else:
-                print "%s/%s does not exist." %( trigger_dir, prj)
-                return None
-
-            raw_repo = pjoin(self.live, x, target)
-            if not os.path.isdir(raw_repo):
-                print 'Raw live repo of Project %s does not exist' % baserepo['prj']
-                return None
-
-            return raw_repo
-
-        prj = liverepo['prj']
-        target = liverepo['target']
-        arch = liverepo['arch']
-
-        project_raw_repo = _get_rawrepo(liverepo)
-        if not project_raw_repo:
-            return False
-
-        try:
-            os.makedirs(dest_repo)
-        except OSError, e:
-            if not os.path.isdir(dest_repo):
-                print 'Cannot create dir:', e
-                return False
-
-        # all the needed path
-        dest_tmp = pjoin(dest_repo, '.tmp')
-        if os.path.isdir(dest_tmp):
-            # some garbage there, clean it up
-            shutil.rmtree(dest_tmp, ignore_errors=True)
-
-        dest_pkg = pjoin(dest_tmp, arch, 'packages')
-        dest_debug = pjoin(dest_tmp, arch, 'debug')
-        dest_src = pjoin(dest_tmp, 'source')
-
-        for d in (dest_pkg, dest_debug, dest_src):
-            os.makedirs(pjoin(d, 'repodata'))
-
-        # create the snapshot of current live repos
-        ret = self.snapshot(project_raw_repo, arch, dest_repo, dest_pkg, dest_debug, dest_src)
-        print "snapshot %s" %ret
-
-        if baserepo:
-            base_raw_repo = _get_rawrepo(baserepo)
-            if base_raw_repo:
-                self.snapshot(base_raw_repo, arch, dest_repo, dest_pkg, dest_debug, dest_src, ignore_exists=True)
-            else:
-                print 'Skip invalid base raw repo for sandbox'
-
-        if extrarpms:
-            self.snapshot(extrarpms, arch, dest_repo, dest_pkg, dest_debug, dest_src, ignore_exists=True)
-
-        # call external 'createrepo' now
-        self.createrepo(dest_pkg, dest_src, dest_debug)
-
-        self.update_package_groups( dest_pkg)
-
-        if prjconf:
-            print 'saving OBS building config to repo'
-            self.save_project_config(dest_pkg, prjconf)
-
-        self.cleanup(dest_repo, dest_tmp, arch)
-
-        # fix the permissions
-        self.fix_perm(dest_repo)
-
-        return True
-
-if __name__ == '__main__':
-    usage = "Usage: %prog [options] <prj> <target> <dest_repo> <arch>"
-    parser = optparse.OptionParser(usage)
-    opts, args = parser.parse_args()
-
-    try:
-        prj, target, dest_repo, arch = args[:4]
-    except ValueError:
-        parser.error('parameter wrong, please see usage')
-        sys.exit(1)
-
-    linker = LinkRepo('/srv/obs/repos')
-    repo = {'prj': prj, 'target': target, 'arch': arch}
-    linker.linkrepo('/srv/obs/repos_sync', repo, dest_repo)
diff --git a/repomaker/release.py b/repomaker/release.py
deleted file mode 100644 (file)
index 181ca73..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-
-from datetime import date, datetime, timedelta
-import os
-
-RELEASE_SPOOL = '/var/spool/repomaker'
-
-def _simple_buildid(today = None):
-    if today is None:
-        today = date.today()
-    return today.strftime('%Y%m%d')
-
-class BuildID:
-    def __init__(self, spool = RELEASE_SPOOL):
-        if not os.path.exists(spool):
-            os.makedirs(spool)
-
-        self.spool = spool
-
-    def _get_latest_build_id(self, partof, release):
-        f = open("%s/%s-%s" %(self.spool, partof, release) )
-        latest = f.readline()
-        f.close()
-        return latest
-
-    def _save_new_build_id(self, id, partof, release):
-        f = open("%s/%s-%s" %(self.spool, partof, release), 'w')
-        f.write(id)
-        f.close()
-
-    def _get_release_number(self, release, today = None):
-        return _simple_buildid(today)
-
-    def _get_current_release(self):
-        return 'tizen'
-
-    def get_new_build_id(self, release = None, partof='release', today=None):
-        if release is None:
-            release = self._get_current_release()
-
-        buildid = self._get_release_number(release, today)
-
-        if os.path.exists("%s/%s-%s" %(self.spool, partof, release) ):
-            latest = self._get_latest_build_id(partof, release)
-            if buildid in latest:
-                # same day, new build number
-                l = latest.split(".")
-                build = int(l[-1]) + 1
-                buildid += '.%s' %build
-            else:
-                buildid += '.1'
-        else:
-            latest = None
-            buildid += '.1'
-
-        self._save_new_build_id(buildid, partof, release)
-
-        new = '_'.join([release, buildid])
-        if latest:
-            old = '_'.join([release, latest])
-        else:
-            old = ''
-
-        return old, new
-
-if __name__ == "__main__":
-    # verify spool
-    print 'Two serial buildid for one day:'
-    bid = BuildID()
-    old_id, new_id = bid.get_new_build_id()
-    print new_id, old_id
-    old_id, new_id = bid.get_new_build_id()
-    print new_id, old_id
-
-    old_id, new_id = bid.get_new_build_id('tizen-99')
-    print new_id, old_id
-    old_id, new_id = bid.get_new_build_id('tizen-99')
-    print new_id, old_id