re-implement scripts to python
authorLin Yang <lin.a.yang@intel.com>
Wed, 25 Jul 2012 02:46:52 +0000 (10:46 +0800)
committerHasan Wan <hasan.wan@intel.com>
Fri, 27 Jul 2012 08:09:20 +0000 (16:09 +0800)
15 files changed:
PolicyCheck.sh [deleted file]
SubmitToOBS.sh [deleted file]
aiaiaicheck.py [new file with mode: 0755]
buildservice.py [new file with mode: 0644]
envparas.py [new file with mode: 0644]
errors.py [new file with mode: 0644]
git.py [new file with mode: 0644]
msger.py [new file with mode: 0644]
mysql.py
obspkg.py [new file with mode: 0644]
policycheck.py [new file with mode: 0755]
rpmlint_wapper.py [new file with mode: 0755]
runner.py [new file with mode: 0644]
submitobs.py [new file with mode: 0755]
utils.py [new file with mode: 0644]

diff --git a/PolicyCheck.sh b/PolicyCheck.sh
deleted file mode 100755 (executable)
index 4387fc4..0000000
+++ /dev/null
@@ -1,341 +0,0 @@
-#!/bin/bash
-#Do policy check when a change is created in gerrit
-
-set -x
-
-SPEC_CHECKER=${WORKSPACE}/spec_checker
-cat > $SPEC_CHECKER <<EOF
-#!/usr/bin/env python
-
-import sys
-import tempfile
-sys.path.insert(1, '/usr/share/rpmlint')
-
-from Filter import setRawOut, printed_messages
-import SpecCheck
-import Pkg
-
-import Filter
-
-def quiet(s):
-    pass
-
-Filter.__print=quiet
-
-spec_file=sys.argv[1]
-
-# the tempfile is designed for python policycheck.py, bash script doesn't use it
-output = tempfile.NamedTemporaryFile()
-setRawOut(output.name)
-
-pkg = Pkg.FakePkg(spec_file)
-check = SpecCheck.SpecCheck()
-check.check_spec(pkg, spec_file)
-pkg.cleanup()
-
-from Filter import _rawout
-_rawout.flush()
-print "rpmlint checked %s: %d errors, %s warnings." % (spec_file, printed_messages["E"], printed_messages["W"])
-for line in output.readlines():
-    line = line.strip().lstrip(spec_file+':').strip()
-    if not line.startswith('W:') and not line.startswith('E:'):
-        line = 'line '+line
-    print '- '+line
-
-EOF
-
-chmod a+x $SPEC_CHECKER
-
-#osc wrapper to handle osc random failure
-OSCCMD=${WORKSPACE}/wrapped_osc
-cat > $OSCCMD << DATA
-#!/usr/bin/python
-import sys, locale
-# this is a hack to make osc work as expected with utf-8 characters,
-# no matter how site.py is set...
-reload(sys)
-loc = locale.getdefaultlocale()[1]
-if not loc:
-    loc = sys.getdefaultencoding()
-sys.setdefaultencoding(loc)
-del sys.setdefaultencoding
-from osc import core, oscerr
-# Injection code for osc.core to fix the empty XML bug
-def solid_get_files_meta(self, revision='latest', skip_service=True):
-    from time import sleep
-    retry_count = 3
-    while retry_count > 0:
-        fm = core.show_files_meta(self.apiurl, self.prjname, self.name,
-                                  revision=revision, meta=self.meta)
-        try:
-            root = core.ET.fromstring(fm)
-            break
-        except:
-            print 'corrupted or empty obs server response ,retrying ...'
-            sleep(1)
-            retry_count -= 1
-    if not retry_count:
-        # all the re-try failed, abort
-        raise oscerr.OscIOError(None, 'cannet fetch files meta xml from server')
-    # look for "too large" files according to size limit and mark them
-    for e in root.findall('entry'):
-        size = e.get('size')
-        if size and self.size_limit and int(size) > self.size_limit \
-            or skip_service and (e.get('name').startswith('_service:') or e.get('name').startswith('_service_')):
-            e.set('skipped', 'true')
-    return core.ET.tostring(root)
-core.Package.get_files_meta = solid_get_files_meta
-# run
-from osc import commandline, babysitter
-sys.exit(babysitter.run(commandline.Osc()))
-DATA
-chmod +x $OSCCMD
-
-#Update one git project
-update-git-project()
-{
-    pushd .
-    PRJ_PATH=$1
-    PRJ_FULL_NAME=$2
-    PRJ_NAME=$(basename ${PRJ_FULL_NAME})
-
-    if [ ! -d ${PRJ_PATH} ]; then
-        mkdir ${PRJ_PATH} -p
-    fi
-    cd ${PRJ_PATH}
-    retry_num=3
-    while [ $retry_num -ne 0 ]
-    do
-        #try pull the change from git, if failed, delete local code and re-clone
-        if [ -d ${PRJ_NAME} ]; then
-            cd ${PRJ_NAME}
-            git pull 2>&1
-            if [ $? -ne 0 ]; then
-                cd ${PRJ_PATH}
-                rm -rf ${PRJ_PATH}/${PRJ_NAME}
-                git clone ssh://${GERRIT_USERNAME}@${GERRIT_HOSTNAME}:${GERRIT_SSHPORT}/${PRJ_FULL_NAME}
-            else
-                popd
-                return 0                
-            fi
-        else
-            git clone ssh://${GERRIT_USERNAME}@${GERRIT_HOSTNAME}:${GERRIT_SSHPORT}/${PRJ_FULL_NAME}
-        fi
-        if [ $? -eq 0 -a -d ${PRJ_NAME} ]; then
-            popd
-            return 0
-        else
-            let "retry_num=retry_num-1"
-            if [ $retry_num -eq 0 ]; then
-                popd
-                return 1
-            fi
-            rm ${PRJ_PATH}/${PRJ_NAME} -rf
-        fi
-    done
-}
-
-check_obs_target()
-{
-    if [ ! -n "${OBS_STAGING_PROJECT}" -o "${needSR}" = "true" ]; then
-        if [ -n "${destprj}" ]; then
-            destprj=${destprj}" "
-        fi
-        destprj=${destprj}${OBS_DEST_PROJECT}
-
-        #Check whether exist spec file under packaging directory
-        if [ -f ${packagingdir}/${SPECFILE}.spec ]; then
-            #Abstract version from spec file
-            cp ${packagingdir}/${SPECFILE}.spec template.spec
-            egrep "^%prep" template.spec
-            if [ $? -ne 0 ]; then
-                echo "%prep" >> template.spec
-            fi
-            #Abstract project name from spec file
-            sed -e 's/BuildArch:.*//g' -e 's/ExclusiveArch:.*//g' -e 's/^%prep/%prep\necho %{name}\nexit\n/' template.spec > tmp.spec
-            NAME=$(rpmbuild -bp --nodeps --force tmp.spec --define '_topdir .' --define '_builddir .' --define '_sourcedir .' --define '_rpmdir .' --define '_specdir .' --define '_srcrpmdir .' 2>&1 | grep 'echo' | cut -d ' ' -f 3)
-            if [ ! -n "${NAME}" ]; then
-                NAME=$(egrep "Name:[ \t]*" tmp.spec | sed "s/.*Name:[ \t]*\(.*\)$/\1/g")
-            fi
-            rm tmp.spec template.spec
-
-            $OSCCMD -A ${OBS_API_URL} -c ${OBS_OSCRC_PATH} ls ${OBS_DEST_PROJECT} | egrep "^${NAME}$"
-            if [ $? -ne 0 ]; then
-                newpkg=true
-            fi
-        fi
-    fi
-    return
-}
-
-#Check spec file
-check_spec()
-{
-    if [ -d ${packagingdir} ]; then
-        #Check whether exist spec file under packaging directory
-        if [ -f ${packagingdir}/${SPECFILE}.spec ]; then
-           msg=$($SPEC_CHECKER ${packagingdir}/${SPECFILE}.spec)
-            print ${spec_check_msg}
-        else
-            if [ -n "${lack_spec}" ]; then
-                lack_spec=${lack_spec}" "
-            fi
-            lack_spec=${lack_spec}"${SPECFILE}.spec"
-        fi
-    else
-        msg="Error: Do not contain packaging directory!"
-    fi
-    if [ -n "${msg}" ]; then
-        if [ -n "${spec_check_msg}" ]; then
-            spec_check_msg=${spec_check_msg}"
-
-"
-        fi
-        spec_check_msg=${spec_check_msg}${msg}
-        msg=""
-    fi
-    return
-}
-
-PROJECT=$(basename ${GERRIT_PROJECT})
-GERRIT_PATH=$(echo ${GERRIT_PROJECT} | sed "s/^\(.*\)${PROJECT}$/\1/g")
-JENKINS_HOME=$(cd ${WORKSPACE}/../../..; pwd)
-
-#Update git-obs-mapping configuration if don't exist
-if [ ! -f ${JENKINS_HOME}/git/scm/git-obs-mapping/git-obs-mapping.xml ]; then
-    update-git-project ${JENKINS_HOME}/git/scm scm/git-obs-mapping
-    if [ $? -ne 0 ]; then
-        exit 1
-    fi
-fi
-
-#Update gerrit project from remote
-update-git-project ${JENKINS_HOME}/git/${GERRIT_PATH} ${GERRIT_PROJECT}
-if [ $? -eq 0 ]; then
-    cp ${JENKINS_HOME}/git/${GERRIT_PROJECT} ${WORKSPACE} -rf
-fi
-update-git-project ${WORKSPACE} ${GERRIT_PROJECT}
-if [ $? -ne 0 ]; then
-    exit 1
-fi
-cd ${WORKSPACE}/${PROJECT}
-
-#Fetch the patch from Gerrit
-git fetch ssh://${GERRIT_USERNAME}@${GERRIT_HOSTNAME}:${GERRIT_SSHPORT}/${GERRIT_PROJECT} ${GERRIT_REFSPEC} -t
-git checkout FETCH_HEAD
-   
-packagingdir=$(readlink packaging)
-if [ ! -n "${packagingdir}" ]; then
-    packagingdir="packaging"
-fi
-git show --pretty="format:" --name-only ${GERRIT_PATCHSET_REVISION} | egrep "${packagingdir}/.*\.changes"
-if [ $? -eq 0 ]; then
-    git describe --tags --exact-match ${GERRIT_PATCHSET_REVISION}
-    if [ $? -eq 0 ]; then
-        needSR=true
-    fi
-fi
-
-if [ -d ${packagingdir} ]; then
-    if [ $(find ./${packagingdir} -name "*\.spec" | wc -l) -eq 1 ]; then
-        default_spec=$(find ./${packagingdir} -name "*\.spec" | sed "s/.*${packagingdir}\/\(.*\)\.spec/\1/g")
-    fi
-    if [ ! -n "${default_spec}" ]; then
-        default_spec=${PROJECT}
-    fi
-fi
-
-#Parse OBS target project from git-obs-mapping configuration. Result "@OBS_project#@#@OBS_staging_project#@#@OBS_package" will be recoreded in ${WORKSPACE}/mapping.txt
-xml sel -t -m "/mapping/project[@name='${GERRIT_PROJECT}' and @submission='N'] | /mapping/project[@name='/${GERRIT_PROJECT}' and @submission='N'] | /mapping/project[@name='${GERRIT_PROJECT}']/branch[@name='${GERRIT_BRANCH}' and @submission='N'] | /mapping/project[@name='/${GERRIT_PROJECT}']/branch[@name='${GERRIT_BRANCH}' and @submission='N']" -v "@name" -n ${JENKINS_HOME}/git/scm/git-obs-mapping/git-obs-mapping.xml
-if [ $? -ne 0 ]; then
-    xml sel -t -m "/mapping/project[@name='${GERRIT_PROJECT}']/branch[@name='${GERRIT_BRANCH}'] | /mapping/project[@name='/${GERRIT_PROJECT}']/branch[@name='${GERRIT_BRANCH}']" -v "concat(@OBS_project,'#@#',@OBS_staging_project,'#@#',@OBS_package)" -n ${JENKINS_HOME}/git/scm/git-obs-mapping/git-obs-mapping.xml > ${WORKSPACE}/mapping.txt
-    if [ $? -ne 0 -o ! -f ${WORKSPACE}/mapping.txt -o ! -s ${WORKSPACE}/mapping.txt ]; then
-        PATH_NAME="/"${GERRIT_PROJECT}
-        while [ true ];
-        do
-            PATH_NAME=$(echo ${PATH_NAME} | sed "s/^\(.*\)\/[^\/]*$/\1/g")
-            xml sel -t -m "/mapping/default/path[@name='${PATH_NAME}' and @submission='N'] | /mapping/default/path[@name='${PATH_NAME}/' and @submission='N'] | /mapping/default/path[@name='${PATH_NAME}']/branch[@name='${GERRIT_BRANCH}' and @submission='N'] | /mapping/default/path[@name='${PATH_NAME}/']/branch[@name='${GERRIT_BRANCH}' and @submission='N']" -v "@name" -n ${JENKINS_HOME}/git/scm/git-obs-mapping/git-obs-mapping.xml
-            if [ $? -eq 0 ]; then
-                break
-            fi
-            xml sel -t -m "/mapping/default/path[@name='${PATH_NAME}']/branch[@name='${GERRIT_BRANCH}'] | /mapping/default/path[@name='${PATH_NAME}/']/branch[@name='${GERRIT_BRANCH}']" -v "concat(@OBS_project,'#@#',@OBS_staging_project,'#@#',@OBS_package)" -n ${JENKINS_HOME}/git/scm/git-obs-mapping/git-obs-mapping.xml > ${WORKSPACE}/mapping.txt
-            if [ $? -eq 0 -a -f ${WORKSPACE}/mapping.txt -a -s ${WORKSPACE}/mapping.txt ]; then
-                break
-            elif [ ! -n "${PATH_NAME}" ]; then
-                break
-            fi
-        done
-    fi
-fi
-
-if [ -f ${WORKSPACE}/mapping.txt -a -s ${WORKSPACE}/mapping.txt ]; then
-    for line in $(cat ${WORKSPACE}/mapping.txt)
-    do
-        if [ ! -n "${line}" ]; then
-            continue
-        fi
-        OBS_DEST_PROJECT=$(echo ${line} | awk -F '#@#' '{print $1}')
-        OBS_STAGING_PROJECT=$(echo ${line} | awk -F '#@#' '{print $2}')
-        SPECFILE=$(echo ${line} | awk -F '#@#' '{print $3}')
-        if [ ! -n "${OBS_DEST_PROJECT}" ]; then
-            continue
-        fi
-
-        if [ ! -n "${SPECFILE}" ]; then
-            SPECFILE=${default_spec}
-        fi
-
-        check_obs_target
-
-        #if [ -n "${OBS_STAGING_PROJECT}" -a "${needSR}" != "true" ]; then
-        #    git show --pretty="format:" --name-only ${GERRIT_PATCHSET_REVISION} | egrep "${packagingdir}/${SPECFILE}.spec"
-        #    if [ $? -ne 0 ]; then
-        #        continue
-        #    fi
-        #fi
-
-        egrep "${SPECFILE}.spec" ${WORKSPACE}/speclist
-        if [ $? -eq 0 ]; then
-            continue
-        else
-            echo "${SPECFILE}.spec" >> ${WORKSPACE}/speclist
-        fi
-
-        #use rpmlint to check specfile
-        check_spec 
-    done
-else
-    message="[IMPORTANT NOTICE]: The change for ${GERRIT_BRANCH} branch will not be submitted to OBS according configuration in gerrit \"scm/git-obs-mapping\" project. If needed, please modify scm/git-obs-mapping to enable submission to OBS."
-fi
-
-#Post comment back to gerrit
-if [ ! -n "${message}" ]; then
-    message="[IMPORTANT NOTICE]:"
-    if [ -n "${destprj}" ]; then
-        if [ "${newpkg}" = "true" ]; then
-            message=${message}" [New Package]"
-        fi
-        message=${message}" This change will be submitted to OBS ${destprj} project!!!"
-    else
-        message=${message}" This change will not be submitted to OBS. If want to trigger submission to OBS, please make sure this change meets all below criteria.
-- The commit includes changes to the change log file under packaging directory.
-- A tag is created on this commit, and pushed together with the commit to Gerrit at the same time."
-    fi
-fi
-if [ -n "${message}" ]; then
-    ssh -p ${GERRIT_SSHPORT} ${GERRIT_USERNAME}@${GERRIT_HOSTNAME} gerrit review ${GERRIT_CHANGE_NUMBER},${GERRIT_PATCHSET_NUMBER} --message \'"${message}"\' 
-fi
-
-if [ -n "${lack_spec}" ]; then
-    spec_check_msg="Error: Not contain ${lack_spec} under packaging directory!
-
-"${spec_check_msg}
-fi
-if [ -n "${spec_check_msg}" ]; then
-    ssh -p ${GERRIT_SSHPORT} ${GERRIT_USERNAME}@${GERRIT_HOSTNAME} gerrit review ${GERRIT_CHANGE_NUMBER},${GERRIT_PATCHSET_NUMBER} --message \'"${spec_check_msg}"\' 
-fi
-
-#Cleanup and exit
-rm -rf ${WORKSPACE}/* -rf
-exit 0
diff --git a/SubmitToOBS.sh b/SubmitToOBS.sh
deleted file mode 100755 (executable)
index 57368d0..0000000
+++ /dev/null
@@ -1,334 +0,0 @@
-#!/bin/bash
-#This script will submit a merged change to corresponding OBS staging project. If necessary, create SR to request merge to target OBS project.
-
-set -x
-#Cleanup workspace and record execute resutl in mysql DB when script done
-end()
-{
-    rm -rf ${WORKSPACE}/* -rf
-    if [ $1 = success ]; then
-        mysql -h${MYSQL_HOSTNAME} -P${MYSQL_PORT} -u${MYSQL_USERNAME} -p${MYSQL_PASSWORD} -D${MYSQL_DB_NAME} -e "UPDATE ChangeMerged_Event SET state='TRIGGER_SUCCESS' WHERE changeNum='${GERRIT_CHANGE_NUMBER}' and patchsetNum='${GERRIT_PATCHSET_NUMBER}'"
-        exit 0
-    elif [ $1 = retry ]; then
-        mysql -h${MYSQL_HOSTNAME} -P${MYSQL_PORT} -u${MYSQL_USERNAME} -p${MYSQL_PASSWORD} -D${MYSQL_DB_NAME} -e "UPDATE ChangeMerged_Event SET state='TRIGGER_RETRY' WHERE changeNum='${GERRIT_CHANGE_NUMBER}' and patchsetNum='${GERRIT_PATCHSET_NUMBER}'"
-        exit 1
-    else
-        mysql -h${MYSQL_HOSTNAME} -P${MYSQL_PORT} -u${MYSQL_USERNAME} -p${MYSQL_PASSWORD} -D${MYSQL_DB_NAME} -e "UPDATE ChangeMerged_Event SET state='TRIGGER_FAILURE' WHERE changeNum='${GERRIT_CHANGE_NUMBER}' and patchsetNum='${GERRIT_PATCHSET_NUMBER}'"
-        exit 1
-    fi
-}
-
-#Update one git project
-update-git-project()
-{
-    pushd .
-    PRJ_PATH=$1
-    PRJ_FULL_NAME=$2
-    PRJ_NAME=$(basename ${PRJ_FULL_NAME})
-
-    if [ ! -d ${PRJ_PATH} ]; then
-        mkdir ${PRJ_PATH} -p
-    fi
-    cd ${PRJ_PATH}
-    retry_num=3
-    while [ $retry_num -ne 0 ]
-    do
-        #try pull the change from git, if failed, delete local code and re-clone
-        if [ -d ${PRJ_NAME} ]; then
-            cd ${PRJ_NAME}
-            git pull 2>&1
-            if [ $? -ne 0 ]; then
-                cd ${PRJ_PATH}
-                rm -rf ${PRJ_PATH}/${PRJ_NAME}
-                git clone ssh://${GERRIT_USERNAME}@${GERRIT_HOSTNAME}:${GERRIT_SSHPORT}/${PRJ_FULL_NAME}
-            else
-                popd
-                return 0               
-            fi
-        else
-            git clone ssh://${GERRIT_USERNAME}@${GERRIT_HOSTNAME}:${GERRIT_SSHPORT}/${PRJ_FULL_NAME}
-        fi
-        if [ $? -eq 0 -a -d ${PRJ_NAME} ]; then
-            popd
-            return 0
-        else
-            let "retry_num=retry_num-1"
-            if [ $retry_num -eq 0 ]; then
-                popd
-                return 1
-            fi
-            rm ${PRJ_PATH}/${PRJ_NAME} -rf
-        fi
-    done
-}
-
-#Checkout OBS package to local
-obs_checkout()
-{
-    OBS_PROJECT=$1
-    OBS_PACKAGE=$2
-    retry_num=3
-    while [ $retry_num -ne 0 ]
-    do
-        $OSCCMD ls ${OBS_PROJECT} | egrep "^${OBS_PACKAGE}$"
-        if [ $? -eq 0 ]; then
-            #If OBS staging project already contains package, checkout it directly
-            $OSCCMD co ${OBS_PROJECT} ${OBS_PACKAGE}
-        else
-            #Create a new package in OBS staging project
-            dummy=$($OSCCMD ls ${OBS_PROJECT} | head -1)
-            $OSCCMD ls ${OBS_PROJECT} ${dummy}
-            $OSCCMD co ${OBS_PROJECT} ${dummy}
-            (cd ${OBS_PROJECT} && $OSCCMD mkpac ${OBS_PACKAGE})
-            (cd ${OBS_PROJECT}/${OBS_PACKAGE} && $OSCCMD ci --skip-validation --force -m "init package")
-        fi
-        if [ $? -eq 0 -a -d ${OBS_PROJECT}/${OBS_PACKAGE} ]; then
-            if [ "${OBS_STAGING_PROJECT}" != "${OBS_DEST_PROJECT}" ]; then
-                $OSCCMD ls ${OBS_PROJECT} | egrep "^tmp$"
-                if [ $? -ne 0 ]; then
-                    (cd ${OBS_PROJECT} && $OSCCMD mkpac tmp)
-                    (cd ${OBS_PROJECT}/tmp && $OSCCMD ci --skip-validation --force -m "Leave an empty package in here to prevent OBS delete TIZEN:Staging project automatically when all request from here to TIZEN:Main are accepted.")
-                fi
-            fi
-            return 0
-        else
-            let "retry_num=retry_num-1"
-            if [ $retry_num -eq 0 ]; then
-                return 1
-            fi
-            rm ${OBS_PROJECT} -rf
-        fi
-    done
-}
-
-#osc wrapper to handle osc random failure
-OSCCMD=${WORKSPACE}/wrapped_osc
-cat > $OSCCMD << DATA
-#!/usr/bin/python
-import sys, locale
-# this is a hack to make osc work as expected with utf-8 characters,
-# no matter how site.py is set...
-reload(sys)
-loc = locale.getdefaultlocale()[1]
-if not loc:
-    loc = sys.getdefaultencoding()
-sys.setdefaultencoding(loc)
-del sys.setdefaultencoding
-from osc import core, oscerr
-# Injection code for osc.core to fix the empty XML bug
-def solid_get_files_meta(self, revision='latest', skip_service=True):
-    from time import sleep
-    retry_count = 3
-    while retry_count > 0:
-        fm = core.show_files_meta(self.apiurl, self.prjname, self.name,
-                                  revision=revision, meta=self.meta)
-        try:
-            root = core.ET.fromstring(fm)
-            break
-        except:
-            print 'corrupted or empty obs server response ,retrying ...'
-            sleep(1)
-            retry_count -= 1
-    if not retry_count:
-        # all the re-try failed, abort
-        raise oscerr.OscIOError(None, 'cannet fetch files meta xml from server')
-    # look for "too large" files according to size limit and mark them
-    for e in root.findall('entry'):
-        size = e.get('size')
-        if size and self.size_limit and int(size) > self.size_limit \
-            or skip_service and (e.get('name').startswith('_service:') or e.get('name').startswith('_service_')):
-            e.set('skipped', 'true')
-    return core.ET.tostring(root)
-core.Package.get_files_meta = solid_get_files_meta
-# run
-from osc import commandline, babysitter
-sys.exit(babysitter.run(commandline.Osc()))
-DATA
-chmod +x $OSCCMD
-OSCCMD="${WORKSPACE}/wrapped_osc -A ${OBS_API_URL} -c ${OBS_OSCRC_PATH}"
-
-#Prepare necessary variable
-export http_proxy=""
-LANG=c
-unset LC_ALL
-PROJECT=$(basename ${GERRIT_PROJECT})
-GERRIT_PATH=$(echo ${GERRIT_PROJECT} | sed "s/^\(.*\)${PROJECT}$/\1/g")
-JENKINS_HOME=$(cd ${WORKSPACE}/../../..; pwd)
-
-#Update git-obs-mapping configuration if don't exist
-if [ x${GERRIT_PROJECT} = "xscm/git-obs-mapping" -o ! -f ${JENKINS_HOME}/git/scm/git-obs-mapping/git-obs-mapping.xml ]; then
-    update-git-project ${JENKINS_HOME}/git/scm scm/git-obs-mapping
-    if [ $? -ne 0 ]; then
-        end retry
-    fi
-    if [ x${GERRIT_PROJECT} = "xscm/git-obs-mapping" ]; then
-        end success
-    fi
-fi
-
-#Parse OBS target project from git-obs-mapping configuration. Result "@OBS_project#@#@OBS_staging_project#@#@OBS_package" will be recoreded in ${WORKSPACE}/mapping.txt
-#Check whether submission of this project is blocked
-xml sel -t -m "/mapping/project[@name='${GERRIT_PROJECT}' and @submission='N'] | /mapping/project[@name='/${GERRIT_PROJECT}' and @submission='N'] | /mapping/project[@name='${GERRIT_PROJECT}']/branch[@name='${GERRIT_BRANCH}' and @submission='N'] | /mapping/project[@name='/${GERRIT_PROJECT}']/branch[@name='${GERRIT_BRANCH}' and @submission='N']" -v "@name" -n ${JENKINS_HOME}/git/scm/git-obs-mapping/git-obs-mapping.xml > ${WORKSPACE}/mapping.txt
-if [ $? -eq 0 -a -f ${WORKSPACE}/mapping.txt -a -s ${WORKSPACE}/mapping.txt ]; then
-    end success
-fi
-#Check whether this project map to a specific OBS project
-xml sel -t -m "/mapping/project[@name='${GERRIT_PROJECT}']/branch[@name='${GERRIT_BRANCH}'] | /mapping/project[@name='/${GERRIT_PROJECT}']/branch[@name='${GERRIT_BRANCH}']" -v "concat(@OBS_project,'#@#',@OBS_staging_project,'#@#',@OBS_package)" -n ${JENKINS_HOME}/git/scm/git-obs-mapping/git-obs-mapping.xml > ${WORKSPACE}/mapping.txt
-if [ $? -ne 0 -o ! -f ${WORKSPACE}/mapping.txt -o ! -s ${WORKSPACE}/mapping.txt ]; then
-    #Search default section in git-obs-mapping configuration
-    PATH_NAME="/"${GERRIT_PROJECT}
-    while [ true ];
-    do
-        PATH_NAME=$(echo ${PATH_NAME} | sed "s/^\(.*\)\/[^\/]*$/\1/g")
-        #Check whether submission under this directory is blocked
-        xml sel -t -m "/mapping/default/path[@name='${PATH_NAME}' and @submission='N'] | /mapping/default/path[@name='${PATH_NAME}/' and @submission='N'] | /mapping/default/path[@name='${PATH_NAME}']/branch[@name='${GERRIT_BRANCH}' and @submission='N'] | /mapping/default/path[@name='${PATH_NAME}/']/branch[@name='${GERRIT_BRANCH}' and @submission='N']" -v "@name" -n ${JENKINS_HOME}/git/scm/git-obs-mapping/git-obs-mapping.xml > ${WORKSPACE}/mapping.txt
-        if [ $? -eq 0 -a -f ${WORKSPACE}/mapping.txt -a -s ${WORKSPACE}/mapping.txt ]; then
-            end success
-        fi
-        xml sel -t -m "/mapping/default/path[@name='${PATH_NAME}']/branch[@name='${GERRIT_BRANCH}'] | /mapping/default/path[@name='${PATH_NAME}/']/branch[@name='${GERRIT_BRANCH}']" -v "concat(@OBS_project,'#@#',@OBS_staging_project,'#@#',@OBS_package)" -n ${JENKINS_HOME}/git/scm/git-obs-mapping/git-obs-mapping.xml > ${WORKSPACE}/mapping.txt
-        if [ $? -eq 0 -a -f ${WORKSPACE}/mapping.txt -a -s ${WORKSPACE}/mapping.txt ]; then
-            break
-        elif [ ! -n "${PATH_NAME}" ]; then
-            end success
-        fi
-    done
-fi
-
-#Update gerrit project from remote
-update-git-project ${JENKINS_HOME}/git/${GERRIT_PATH} ${GERRIT_PROJECT}
-if [ $? -eq 0 ]; then
-    cp ${JENKINS_HOME}/git/${GERRIT_PROJECT} ${WORKSPACE} -rf
-fi
-update-git-project ${WORKSPACE} ${GERRIT_PROJECT}
-if [ $? -ne 0 ]; then
-    end retry
-fi
-
-cd ${WORKSPACE}/${PROJECT}
-git checkout origin/${GERRIT_BRANCH}
-
-packagingdir=$(readlink packaging)
-if [ ! -n "${packagingdir}" ]; then
-    packagingdir="packaging"
-fi
-
-COMMITLOG=$(git log -1 --format="%h : %s" ${GERRIT_PATCHSET_REVISION})
-
-git show --pretty="format:" --name-only ${GERRIT_PATCHSET_REVISION} | egrep "${packagingdir}/.*\.changes"
-if [ $? -eq 0 ]; then
-    git describe --tags --exact-match ${GERRIT_PATCHSET_REVISION}
-    if [ $? -eq 0 ]; then
-        needSR=true
-    fi
-fi
-
-for line in $(cat ${WORKSPACE}/mapping.txt)
-do
-    if [ ! -n "${line}" ]; then
-        continue
-    fi
-    cd ${WORKSPACE}/${PROJECT}
-    git clean -fd
-
-    OBS_DEST_PROJECT=$(echo ${line} | awk -F '#@#' '{print $1}')
-    OBS_STAGING_PROJECT=$(echo ${line} | awk -F '#@#' '{print $2}')
-    SPECFILE=$(echo ${line} | awk -F '#@#' '{print $3}')
-    if [ ! -n "${OBS_DEST_PROJECT}" ]; then
-        end failure
-    fi
-    if [ ! -n "${OBS_STAGING_PROJECT}" ]; then
-        OBS_STAGING_PROJECT=${OBS_DEST_PROJECT}
-    else
-        if [ "x${needSR}" != "xtrue" ]; then
-            continue
-        fi
-    fi
-
-    #If git-obs-mapping don't indicate use which specfile, use the only one specfile by default
-    if [ ! -n "${SPECFILE}" ]; then
-        if [ -d ${packagingdir} ]; then
-            if [ $(find ./${packagingdir} -name "*\.spec" | wc -l) -eq 1 ]; then
-                SPECFILE=$(find ./${packagingdir} -name "*\.spec" | sed "s/.*${packagingdir}\/\(.*\)\.spec/\1/g")
-            fi
-        fi
-        if [ ! -n "${SPECFILE}" ]; then
-            SPECFILE=${PROJECT}
-        fi
-    fi
-    #Abstract related info from spec file
-    if [ -f ${packagingdir}/${SPECFILE}.spec ]; then
-        cp ${packagingdir}/${SPECFILE}.spec template.spec
-        egrep "^%prep" template.spec
-        if [ $? -ne 0 ]; then
-            echo "%prep" >> template.spec
-        fi
-        sed -e 's/BuildArch:.*//g' -e 's/ExclusiveArch:.*//g' -e 's/^%prep/%prep\necho %{version}\nexit\n/' template.spec > tmp.spec
-        VERSION=$(rpmbuild -bp --nodeps --force tmp.spec --define '_topdir .' --define '_builddir .' --define '_sourcedir .' --define '_rpmdir .' --define '_specdir .' --define '_srcrpmdir .' 2>&1 | grep 'echo' | cut -d ' ' -f 3)
-        if [ ! -n "${VERSION}" ]; then
-            VERSION=$(egrep "Version:[ \t]*" tmp.spec | sed "s/.*Version:[ \t]*\(.*\)$/\1/g")
-        fi
-        sed -e 's/BuildArch:.*//g' -e 's/ExclusiveArch:.*//g' -e 's/^%prep/%prep\necho %{name}\nexit\n/' template.spec > tmp.spec
-        NAME=$(rpmbuild -bp --nodeps --force tmp.spec --define '_topdir .' --define '_builddir .' --define '_sourcedir .' --define '_rpmdir .' --define '_specdir .' --define '_srcrpmdir .' 2>&1 | grep 'echo' | cut -d ' ' -f 3)
-        if [ ! -n "${NAME}" ]; then
-            NAME=$(egrep "Name:[ \t]*" tmp.spec | sed "s/.*Name:[ \t]*\(.*\)$/\1/g")
-        fi
-        sed -e 's/BuildArch:.*//g' -e 's/ExclusiveArch:.*//g' -e 's/^%prep/%prep\necho %{SOURCE0}\nexit\n/' template.spec > tmp.spec
-        TARBALL=$(basename $(rpmbuild -bp --nodeps --force tmp.spec --define '_topdir .' --define '_builddir .' --define '_sourcedir .' --define '_rpmdir .' --define '_specdir .' --define '_srcrpmdir .' 2>&1 | grep 'echo' | cut -d ' ' -f 3))
-        if [ ! -n "${TARBALL}" ]; then
-            sed -e 's/BuildArch:.*//g' -e 's/ExclusiveArch:.*//g' -e 's/^%prep/%prep\necho %{SOURCE}\nexit\n/' template.spec > tmp.spec
-            TARBALL=$(basename $(rpmbuild -bp --nodeps --force tmp.spec --define '_topdir .' --define '_builddir .' --define '_sourcedir .' --define '_rpmdir .' --define '_specdir .' --define '_srcrpmdir .' 2>&1 | grep 'echo' | cut -d ' ' -f 3))
-        fi
-        rm tmp.spec template.spec
-        if [ ! -n "${NAME}" -o ! -n "${VERSION}" ]; then
-            end failure
-        fi
-    else
-        end failure
-    fi
-    #Use gbs to generate tarball
-    gbs export --spec=${packagingdir}/${SPECFILE}.spec 
-
-    cd ${WORKSPACE}
-    OBS_PACKAGE=${NAME}
-    #Checkout OBS package to local
-    obs_checkout ${OBS_STAGING_PROJECT} ${OBS_PACKAGE}
-    if [ $? -ne 0 ]; then
-        end retry
-    fi
-
-    #Update files in local OBS package directory    
-    cd ${WORKSPACE}/${OBS_STAGING_PROJECT}/${OBS_PACKAGE}
-    rm ${WORKSPACE}/${OBS_STAGING_PROJECT}/${OBS_PACKAGE}/* -rf
-    cp ${WORKSPACE}/${PROJECT}/${packagingdir}/* ${WORKSPACE}/${OBS_STAGING_PROJECT}/${OBS_PACKAGE} -rf
-    echo -e "Commit: ${COMMITLOG}\nOwner: ${GERRIT_PATCHSET_UPLOADER_NAME} <${GERRIT_PATCHSET_UPLOADER_EMAIL}>\nGerrit URL: ${GERRIT_CHANGE_URL}\nSubmit Time: $(date +"%x %H:%M:%S")" > ${WORKSPACE}/message.txt
-    #Submit code to OBS staging project and create a request to merger to Trunk
-    retry_num=3
-    while [ $retry_num -ne 0 ]
-    do
-        $OSCCMD addremove &&
-        $OSCCMD ci --skip-validation --force -F ${WORKSPACE}/message.txt &&
-        if [ "x${needSR}" = "xtrue" -a "x${OBS_DEST_PROJECT}" != "x${OBS_STAGING_PROJECT}" ]; then
-            $OSCCMD request list -U ${OBS_USERNAME} ${OBS_DEST_PROJECT} ${OBS_PACKAGE} > ${WORKSPACE}/request.txt &&
-            {
-                REQUEST_ID=$(head -1 ${WORKSPACE}/request.txt | awk '{print $1}')
-                if [ -n "${REQUEST_ID}" ]; then
-                    echo -e "" >> ${WORKSPACE}/message.txt
-                    $OSCCMD request show ${REQUEST_ID} > ${WORKSPACE}/request.txt &&
-                    awk -v key="Message:" '$0==key{p=1;next}/\State:/{p=0}p' ${WORKSPACE}/request.txt >> ${WORKSPACE}/message.txt &&
-                    echo "y" | $OSCCMD sr ${OBS_DEST_PROJECT} ${OBS_PACKAGE} -m "$(cat ${WORKSPACE}/message.txt)" --cleanup &&
-                    break
-                else
-                    $OSCCMD sr ${OBS_DEST_PROJECT} ${OBS_PACKAGE} -m "$(cat ${WORKSPACE}/message.txt)" --cleanup &&
-                    break
-                fi
-            }
-        else
-            break
-        fi
-        let "retry_num=retry_num-1"
-        if [ $retry_num -eq 0 ]; then
-            end retry
-        fi
-    done
-done
-end success
diff --git a/aiaiaicheck.py b/aiaiaicheck.py
new file mode 100755 (executable)
index 0000000..03cc59a
--- /dev/null
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+
+"""This script will use aiaiai to check kernel patch.
+"""
+
+import os
+import tempfile
+import shutil
+
+import runner
+import utils
+import git
+from envparas import *
+
+gerritcmd = 'ssh -p %s %s@%s gerrit' % (GERRIT_SSHPORT, GERRIT_USERNAME, GERRIT_HOSTNAME)
+giturl = 'ssh://%s@%s:%s' % (GERRIT_USERNAME, GERRIT_HOSTNAME, GERRIT_SSHPORT)
+
+def end(rc = 0):
+    shutil.rmtree(tmpdir)
+    exit(rc)
+
+def update_git_project(workdir, prj):
+    result = True
+    prjdir = os.path.join(workdir, prj)
+
+    with utils.Workdir(workdir):
+        if os.path.isdir(prjdir):
+            gitprj = git.Git(prjdir)
+            if not gitprj.pull():
+                shutil.rmtree(prjdir)
+                if runner.runtool('git clone %s/%s %s' % (giturl, prj, prj))[0] != 0:
+                    result = False
+        else:
+            if runner.runtool('git clone %s/%s %s' % (giturl, prj, prj))[0] != 0:
+                result = False
+
+    if not result:
+        shutil.rmtree(prjdir)
+    return result
+
+if __name__ == '__main__':
+
+    # current workspace dir
+    workspace = os.getcwd()
+    # Jenkins home dir
+    homedir = os.path.abspath(os.path.join(workspace, '../../..'))
+    tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+    prjdir = os.path.join(tmpdir, GERRIT_PROJECT)
+    prjpath, prj = os.path.split(GERRIT_PROJECT)
+
+    aiaiaicmd = '%s/aiaiai/aiaiai-test-patchset' % homedir
+
+    if utils.retry(update_git_project, (os.path.join(homedir, 'git'), GERRIT_PROJECT)):
+        shutil.copytree(os.path.join(homedir, 'git', GERRIT_PROJECT), prjdir, True)
+    if not utils.retry(update_git_project, (tmpdir, GERRIT_PROJECT)):
+        end(1)
+
+    mygit = git.Git(prjdir)
+    mygit.fetch('%s/%s' % (giturl, GERRIT_PROJECT), GERRIT_REFSPEC, '-t')
+    mygit.checkout('FETCH_HEAD')
+    patch = mygit.format_patch('-n1 -o %s' % tmpdir)[0]
+    basecommit = mygit.get_base_commit(GERRIT_PATCHSET_REVISION)
+    mygit.checkout(basecommit)
+    print '%s -v -j18 --bisectability --sparse --smatch --coccinelle --cppcheck -i %s . ivi_gen_defconfig,i386' % (aiaiaicmd, patch)
+
+    with utils.Workdir(prjdir):
+        outs = runner.outs('%s -v -j18 --bisectability --sparse --smatch --coccinelle --cppcheck -i %s . ivi_gen_defconfig,i386' % (aiaiaicmd, patch))
+
+    print outs
+    msg = []
+    for line in outs.splitlines():
+        if line:
+            msg.append(line)
+    if msg:
+        runner.quiet('%s %s %s,%s --message \'"%s"\'' % (gerritcmd, 'review', GERRIT_CHANGE_NUMBER, GERRIT_PATCHSET_NUMBER, 'aiaiai check result:\n- '+'\n'.join(msg)))
+    end()
diff --git a/buildservice.py b/buildservice.py
new file mode 100644 (file)
index 0000000..4287c5c
--- /dev/null
@@ -0,0 +1,1234 @@
+#
+# buildservice.py - Buildservice API support for Yabsc
+#
+
+# Copyright (C) 2008 James Oakley <jfunk@opensuse.org>
+# Copyright (C) 2010, 2011, 2012 Intel, Inc.
+
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+
+import os
+import string
+import sys
+import shutil
+import tempfile
+import time
+import urllib2
+import M2Crypto
+import xml.etree.cElementTree as ElementTree
+from collections import defaultdict
+
+import errors
+from osc import conf, core
+
class ObsError(Exception):
    """Raised for OBS server communication failures in this module.

    NOTE(review): errors.ObsError is also raised elsewhere in this file --
    consider unifying the two exception types so callers need only one.
    """
    pass
+
+# Injection code for osc.core to fix the empty XML bug
def solid_get_files_meta(self, revision='latest', skip_service=True):
    """Replacement for osc.core.Package.get_files_meta that retries on
    corrupted or empty server responses.

    Fetches the files meta XML up to 3 times, then marks entries that are
    too large or are service files as skipped.  Raises ObsError when all
    retries fail.
    """
    from time import sleep
    import msger

    retry_count = 3
    while retry_count > 0:
        fm = core.show_files_meta(self.apiurl, self.prjname, self.name,
                                  revision=revision, meta=self.meta)
        try:
            root = core.ET.fromstring(fm)
            break
        # narrowed from a bare 'except:' which also swallowed
        # KeyboardInterrupt/SystemExit
        except Exception:
            msger.warning('corrupted or empty obs server response, retrying ...')
            sleep(1)
            retry_count -= 1

    if not retry_count:
        # all the re-try failed, abort ("cannet" typo fixed)
        raise ObsError('cannot fetch files meta xml from server')

    # look for "too large" files according to size limit and mark them,
    # together with _service files, as skipped
    for e in root.findall('entry'):
        size = e.get('size')
        if size and self.size_limit and int(size) > self.size_limit \
            or skip_service and (e.get('name').startswith('_service:') or e.get('name').startswith('_service_')):
            e.set('skipped', 'true')
    return core.ET.tostring(root)

# monkey-patch the fix into osc so every Package instance benefits
core.Package.get_files_meta = solid_get_files_meta
+
class _Metafile:
    """
    _Metafile(url, input, change_is_required=False, file_ext='.xml')

    Re-implementation of osc.core.metafile that stays silent on stdout.
    The given content is written to a temp file whose digest is remembered
    so sync() can detect whether it was edited.
    """

    def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
        self.url = url
        self.change_is_required = change_is_required

        fd, self.filename = tempfile.mkstemp(prefix='osc_metafile.',
                                             suffix=file_ext, dir='/tmp')
        stream = os.fdopen(fd, 'w')
        stream.write(''.join(input))
        stream.close()

        # digest of the pristine content, compared again in sync()
        self.hash_orig = core.dgst(self.filename)

    def sync(self):
        """Upload the file via HTTP PUT and remove it.

        When change_is_required is set and the content is unmodified, the
        upload is skipped.  Always returns True; HTTP errors propagate to
        the caller.
        """
        current = core.dgst(self.filename)
        if self.change_is_required == True and current == self.hash_orig:
            os.unlink(self.filename)
            return True

        # no exception handling here on purpose -- the caller decides
        core.http_PUT(self.url, file=self.filename)
        os.unlink(self.filename)
        return True
+
+# helper functions for class _ProjectFlags
+def _flag2bool(flag):
+    """
+    _flag2bool(flag) -> Boolean
+    Returns a boolean corresponding to the string 'enable', or 'disable'
+    """
+
+    if flag == 'enable':
+        return True
+    elif flag == 'disable':
+        return False
+
+def _bool2flag(b):
+    """
+    _bool2flag(b) -> String
+
+    Returns 'enable', or 'disable' according to boolean value b
+    """
+    if b == True:
+        return 'enable'
+    elif b == False:
+        return 'disable'
+
class _ProjectFlags(object):
    """
    _ProjectFlags(bs, project)

    In-memory model of the build flags of *project*, fetched through the
    BuildService object *bs*.  Flags are kept in nested dicts:

      allrepositories -- global flags (global per-arch flags in ['arches'])
      repositories    -- per-repository flags, each with its own 'arches'

    A flag value is True ('enable'), False ('disable') or None (unset).
    """

    def __init__(self, bs, project):
        self.bs = bs
        self.tree = ElementTree.fromstring(self.bs.getProjectMeta(project))

        # The "default" flags, when undefined
        self.defaultflags = {'build': True,
                             'publish': True,
                             'useforbuild': True,
                             'debuginfo': False}

        # Figure out what arches and repositories are defined
        self.arches = {}
        self.repositories = {}

        # Build individual repository list
        for repository in self.tree.findall('repository'):
            repodict = {'arches': {}}
            self.__init_flags_in_dict(repodict)
            for arch in repository.findall('arch'):
                repodict['arches'][arch.text] = {}
                self.__init_flags_in_dict(repodict['arches'][arch.text])
                # Add placeholder in global arches
                self.arches[arch.text] = {}
            self.repositories[repository.get('name')] = repodict

        # Initialise flags in global arches
        for archdict in self.arches.values():
            self.__init_flags_in_dict(archdict)

        # A special repository representing the global and global arch flags
        self.allrepositories = {'arches': self.arches}
        self.__init_flags_in_dict(self.allrepositories)

        # Now populate the structures from the xml data
        for flagtype in ('build', 'publish', 'useforbuild', 'debuginfo'):
            flagnode = self.tree.find(flagtype)
            # bug fix: a childless Element is falsy, so 'if flagnode:' could
            # skip a present node -- test explicitly against None
            if flagnode is not None:
                for node in flagnode:
                    repository = node.get('repository')
                    arch = node.get('arch')

                    if repository and arch:
                        self.repositories[repository]['arches'][arch][flagtype] = _flag2bool(node.tag)
                    elif repository:
                        self.repositories[repository][flagtype] = _flag2bool(node.tag)
                    elif arch:
                        # bug fix: was 'self.arches[flagtype] = ...', which
                        # stored the flag beside the per-arch dicts instead of
                        # inside them (save() reads self.arches[arch][flagtype])
                        self.arches[arch][flagtype] = _flag2bool(node.tag)
                    else:
                        self.allrepositories[flagtype] = _flag2bool(node.tag)

    def __init_flags_in_dict(self, d):
        """Initialize all four build flags to None (unset) in dict *d*."""
        d.update({'build': None,
                  'publish': None,
                  'useforbuild': None,
                  'debuginfo': None})

    def save(self):
        """Serialize the flag dicts back into self.tree and print the XML.

        NOTE(review): this only rebuilds and prints the meta -- it never
        uploads it to the server; looks unfinished, confirm intent.
        """

        for flagtype in ('build', 'publish', 'useforbuild', 'debuginfo'):
            # Clear the existing flag section; it is rebuilt from scratch.
            # bug fix: explicit None test (empty Element is falsy)
            flagnode = self.tree.find(flagtype)
            if flagnode is not None:
                self.tree.remove(flagnode)

            # Generate rule nodes
            rulenodes = []

            # globals
            if self.allrepositories[flagtype] is not None:
                rulenodes.append(ElementTree.Element(_bool2flag(self.allrepositories[flagtype])))
            for arch in self.arches:
                if self.arches[arch][flagtype] is not None:
                    rulenodes.append(ElementTree.Element(_bool2flag(self.arches[arch][flagtype]), arch=arch))

            # repositories
            for repository in self.repositories:
                if self.repositories[repository][flagtype] is not None:
                    rulenodes.append(ElementTree.Element(_bool2flag(self.repositories[repository][flagtype]), repository=repository))
                for arch in self.repositories[repository]['arches']:
                    if self.repositories[repository]['arches'][arch][flagtype] is not None:
                        rulenodes.append(ElementTree.Element(_bool2flag(self.repositories[repository]['arches'][arch][flagtype]), arch=arch, repository=repository))

            # Add nodes to tree (leftover debug pprint removed)
            if rulenodes:
                flagnode = ElementTree.Element(flagtype)
                self.tree.insert(3, flagnode)
                for rulenode in rulenodes:
                    flagnode.append(rulenode)

        # parenthesized form works under both Python 2 and 3
        print(ElementTree.tostring(self.tree))
+
+class BuildService(object):
+    """Interface to Build Service API"""
+
    def __init__(self, apiurl=None, oscrc=None):
        """Initialize the build service interface.

        apiurl -- OBS API URL; defaults to the one from the osc config
        oscrc  -- optional path to an alternative oscrc config file

        Raises RuntimeError when the oscrc is not writable by the current
        user, errors.ObsError when the configured apiurl is invalid.
        """
        if oscrc:
            try:
                conf.get_config(override_conffile = oscrc)
            except OSError, e:
                if e.errno == 1:
                    # permission problem, should be the chmod(0600) issue
                    raise RuntimeError, 'Current user has no write permission for specified oscrc: %s' % oscrc

                raise # else
            except urllib2.URLError:
                raise errors.ObsError("invalid service apiurl: %s" % apiurl)
        else:
            conf.get_config()

        if apiurl:
            self.apiurl = apiurl
        else:
            self.apiurl = conf.config['apiurl']
+
+    def getAPIServerList(self):
+        """getAPIServerList() -> list
+
+        Get list of API servers configured in .oscrc
+        """
+        apiservers = []
+        for host in conf.config['api_host_options'].keys():
+            apiurl = "%s://%s" % (conf.config['scheme'], host)
+        return apiservers
+
    # the following two alias api are added temporarily for compatible safe
    # (snake_case wrappers delegating to the camelCase methods below)
    def is_new_package(self, dst_project, dst_package):
        return self.isNewPackage(dst_project, dst_package)
    def gen_req_info(self, reqid, show_detail = True):
        return self.genRequestInfo(reqid, show_detail)
+
    def isNewPackage(self, dst_project, dst_package):
        """Return True if dst_package does not exist yet in dst_project.

        HTTP 404 means "new"; any other HTTP error is re-raised.
        """

        new_pkg = False
        try:
            core.meta_exists(metatype = 'pkg',
                        path_args = (core.quote_plus(dst_project), core.quote_plus(dst_package)),
                        create_new = False,
                        apiurl = self.apiurl)
        except urllib2.HTTPError, e:
            if e.code == 404:
                new_pkg = True
            else:
                raise e
        return new_pkg
+
    def isNewProject(self, project):
        """Return True if the specified project does not exist yet.

        HTTP 404 means "new"; other HTTP/SSL/URL errors are wrapped in
        errors.ObsError (unlike isNewPackage, which re-raises).
        """

        new_prj = False
        try:
            # NOTE(review): path_args is a plain string here (no trailing
            # comma), unlike the tuple used in isNewPackage -- works for a
            # single '%s' substitution, but confirm it is intentional
            core.meta_exists(metatype = 'prj',
                        path_args = (core.quote_plus(project)),
                        create_new = False,
                        apiurl = self.apiurl)
        except urllib2.HTTPError, e:
            if e.code == 404:
                new_prj = True
            else:
                raise errors.ObsError("%s" % e)
        except (urllib2.URLError, M2Crypto.m2urllib2.URLError, \
                                  M2Crypto.SSL.SSLError), e:
            raise errors.ObsError("%s" % e)
        return new_prj
+
    def genRequestInfo(self, reqid, show_detail = True):
        """Generate formatted diff info for a request, mainly used by
        notification mails from BOSS.

        Returns a unicode string: the request summary plus, when
        show_detail is True, the request diff.
        """

        def _gen_request_diff():
            """Preferred getter: request_diff can fetch the diff even when
            the request is already accepted/declined.  Returns None on
            failure so the caller falls back to _gen_server_diff().
            """
            reqdiff = ''

            try:
                diff = core.request_diff(self.apiurl, reqid)

                # the server may answer in utf-8 or latin-1; data that
                # decodes as neither is silently dropped
                try:
                    reqdiff += diff.decode('utf-8')
                except UnicodeDecodeError:
                    try:
                        reqdiff += diff.decode('iso-8859-1')
                    except UnicodeDecodeError:
                        pass

            except (AttributeError, urllib2.HTTPError), e:
                return None

            return reqdiff

        def _gen_server_diff(req):
            """Fallback getter: compute the diff server-side.  For a brand
            new package a summary (file list, yaml/spec content) is emitted
            instead of a diff.
            """
            reqdiff = ''

            src_project = req.actions[0].src_project
            src_package = req.actions[0].src_package
            src_rev = req.actions[0].src_rev
            # action attribute names differ across osc versions
            try:
                dst_project = req.actions[0].dst_project
                dst_package = req.actions[0].dst_package
            except AttributeError:
                dst_project = req.actions[0].tgt_project
                dst_package = req.actions[0].tgt_package

            # Check whether the dst pac is a new one
            new_pkg = False
            try:
                core.meta_exists(metatype = 'pkg',
                            path_args = (core.quote_plus(dst_project), core.quote_plus(dst_package)),
                            create_new = False,
                            apiurl = self.apiurl)
            except urllib2.HTTPError, e:
                if e.code == 404:
                    new_pkg = True
                else:
                    raise e

            if new_pkg:
                src_fl = self.getSrcFileList(src_project, src_package, src_rev)

                spec_file = None
                yaml_file = None
                for f in src_fl:
                    if f.endswith(".spec"):
                        spec_file = f
                    elif f.endswith(".yaml"):
                       yaml_file = f

                reqdiff += 'This is a NEW package in %s project.\n' % dst_project

                reqdiff += 'The files in the new package:\n'
                reqdiff += '%s/\n' % src_package
                reqdiff += '  |__  ' + '\n  |__  '.join(src_fl)

                if yaml_file:
                    reqdiff += '\n\nThe content of the YAML file, %s:\n' % (yaml_file)
                    reqdiff += '===================================================================\n'
                    reqdiff += self.getSrcFileContent(src_project, src_package, yaml_file, src_rev)
                    reqdiff += '\n===================================================================\n'

                if spec_file:
                    reqdiff += '\n\nThe content of the spec file, %s:\n' % (spec_file)
                    reqdiff += '===================================================================\n'
                    reqdiff += self.getSrcFileContent(src_project, src_package, spec_file, src_rev)
                    reqdiff += '\n===================================================================\n'
                else:
                    reqdiff += '\n\nspec file NOT FOUND!\n'

            else:
                try:
                    diff = core.server_diff(self.apiurl,
                                        dst_project, dst_package, None,
                                        src_project, src_package, src_rev, False)

                    try:
                        reqdiff += diff.decode('utf-8')
                    except UnicodeDecodeError:
                        try:
                            reqdiff += diff.decode('iso-8859-1')
                        except UnicodeDecodeError:
                            pass

                except urllib2.HTTPError, e:
                    e.osc_msg = 'Diff not possible'
                    return ''

            return reqdiff

        ####################################
        # function implementation start here

        req = core.get_request(self.apiurl, reqid)
        try:
            # drop the review history to keep the summary short
            req.reviews = []
            reqinfo = unicode(req)
        except UnicodeEncodeError:
            reqinfo = u''

        if show_detail:
            diff = _gen_request_diff()
            if diff is None:
                diff = _gen_server_diff(req)

            reqinfo += diff

        # the result, in unicode string
        return reqinfo
+
+    def getRequestList(self, dst_prj, dst_pkg, user='', req_type='submit', req_state=['new','review']):
+        if not user:
+            user = conf.get_apiurl_usr(self.apiurl)
+        return core.get_request_list(self.apiurl, dst_prj, dst_pkg, user, req_type=req_type, req_state=req_state)
+
+    def submitReq(self, src_prj, src_pkg, dst_prj, dst_pkg, msg='', orev=None, src_update='cleanup'):
+        return core.create_submit_request(self.apiurl,
+                                          src_prj, src_pkg,
+                                          dst_prj, dst_pkg,
+                                          msg, orev=orev, src_update=src_update)
+
    def reqAccept(self, reqid, msg=''):
        """Accept request *reqid* with optional comment *msg*.

        Success: return None
        Failed:  return string of error message
        """

        try:
            core.change_request_state(self.apiurl, reqid, 'accepted', message=msg, supersed=None)
        except Exception, e:
            return str(e)

        return None
+
    def reqDecline(self, reqid, msg=''):
        """Decline request *reqid* with optional comment *msg*.

        Success: return None
        Failed:  return string of error message
        """

        try:
            core.change_request_state(self.apiurl, reqid, 'declined', message=msg, supersed=None)
        except Exception, e:
            return str(e)

        return None
+
    def reqRevoke(self, reqid, msg=''):
        """Revoke request *reqid* with optional comment *msg*.

        Success: return None
        Failed:  return string of error message
        """

        try:
            core.change_request_state(self.apiurl, reqid, 'revoked', message=msg, supersed=None)
        except Exception, e:
            return str(e)

        return None
+
    def reqReview(self, reqid, user='', group='', msg=''):
        """Add a review (by *user* and/or *group*) to request *reqid*.

        Success: return None
        Failed:  return string of error message
        """
        try:
            query = { 'cmd': 'addreview' }
            if user:
                query['by_user'] = user
            if group:
                query['by_group'] = group
            u = core.makeurl(self.apiurl, ['request', reqid], query=query)
            f = core.http_POST(u, data=msg)
            root = ElementTree.parse(f).getroot()
            # NOTE(review): the status code is fetched but never checked --
            # a server-side failure with a 200 response would go unnoticed
            root.get('code')
        except Exception, e:
            return str(e)

        return None
+
    def reqSupersede(self, reqid, msg='', supersed=None):
        """Mark request *reqid* as superseded (by request id *supersed*).

        Success: return None
        Failed:  return string of error message
        """
        try:
            core.change_request_state(self.apiurl, reqid, 'superseded', msg, supersed)
        except Exception, e:
            return str(e)

        return None
+
    def getSrcFileList(self, project, package, revision=None):
        """Return the (expanded) list of source file names of project/package,
        optionally at a specific revision.
        """

        return core.meta_get_filelist(self.apiurl, project, package, expand=True, revision=revision)
+
+    def getSrcFileContent(self, project, package, path, revision=None):
+        """ Cat remote file
+        """
+
+        rev = core.show_upstream_xsrcmd5(self.apiurl, project, package, revision=revision)
+        if rev:
+            query = { 'rev': rev }
+        else:
+            query = None
+
+        u = core.makeurl(self.apiurl, ['source', project, package, core.pathname2url(path)], query=query)
+
+        content = ''
+        for buf in core.streamfile(u, core.http_GET, core.BUFSIZE):
+            content += buf
+
+        # return unicode str
+        return content.decode('utf8')
+
+    def getSrcFileChecksum(self, project, package, path, revision=None):
+        """ getSrcFileChecksum(project, package, path, revision=None) -> string
+            returns source md5 of a source file
+        """
+
+        query = {}
+        query['expand'] = 1
+        if revision:
+            query['rev'] = revision
+
+        u = core.makeurl(self.apiurl, ['source', project, package], query=query)
+        f = core.http_GET(u)
+        root = ElementTree.parse(f).getroot()
+
+        for node in root.findall('entry'):
+            if node.get('name') == path:
+                return node.get('md5')
+
+        return None
+
+    def getPackageChecksum(self, project, package, revision=None):
+        """ getPackageChecksum(project, package, revision=None) -> string
+            returns srcmd5 of a package
+        """
+
+        query = {}
+        query['expand'] = 1
+        if revision:
+            query['rev'] = revision
+
+        u = core.makeurl(self.apiurl, ['source', project, package], query=query)
+        f = core.http_GET(u)
+        root = ElementTree.parse(f).getroot()
+
+        return root.get('srcmd5')
+
+    def getLinkinfo(self, project, package, revision=None):
+        """ getLinkinfo(project, package, revision=None) -> (linked_prj, linked_pkg, linked_srcmd5)
+            returns link info of a prj/pkg
+        """
+
+        query = {}
+        query['expand'] = 1
+        if revision:
+            query['rev'] = revision
+
+        u = core.makeurl(self.apiurl, ['source', project, package], query=query)
+        f = core.http_GET(u)
+        root = ElementTree.parse(f).getroot()
+
+        for node in root.findall('linkinfo'):
+            return (node.get('project'), node.get('package'), node.get('srcmd5'))
+
+        return None
+
    def getUserData(self, user, *tags):
        """getUserData() -> str

        Get the requested meta-data *tags* (element names in the user
        meta, e.g. 'email') for *user*.
        """
        return core.get_user_data(self.apiurl, user, *tags)
+
    def getUserName(self):
        """getUserName() -> str

        Get the user name configured in oscrc for the current API server.
        """
        return conf.config['api_host_options'][self.apiurl]['user']
+
+    def getProjectList(self):
+        """getProjectList() -> list
+
+        Get list of projects
+        """
+        return [project for project in core.meta_get_project_list(self.apiurl) if project != 'deleted']
+
+    def getWatchedProjectList(self):
+        """getWatchedProjectList() -> list
+
+        Get list of watched projects
+        """
+        username = self.getUserName()
+        tree = ElementTree.fromstring(''.join(core.get_user_meta(self.apiurl, username)))
+        projects = []
+        watchlist = tree.find('watchlist')
+        if watchlist:
+            for project in watchlist.findall('project'):
+                projects.append(project.get('name'))
+        homeproject = 'home:%s' % username
+        if not homeproject in projects and homeproject in self.getProjectList():
+            projects.append(homeproject)
+        return projects
+
+    def watchProject(self, project):
+        """
+        watchProject(project)
+
+        Watch project
+        """
+        username = self.getUserName()
+        data = core.meta_exists('user', username, create_new=False, apiurl=self.apiurl)
+        url = core.make_meta_url('user', username, self.apiurl)
+
+        person = ElementTree.fromstring(''.join(data))
+        watchlist = person.find('watchlist')
+        if not watchlist:
+            watchlist = ElementTree.SubElement(person, 'watchlist')
+        ElementTree.SubElement(watchlist, 'project', name=str(project))
+
+        f = _Metafile(url, ElementTree.tostring(person))
+        f.sync()
+
+    def unwatchProject(self, project):
+        """
+        watchProject(project)
+
+        Watch project
+        """
+        username = self.getUserName()
+        data = core.meta_exists('user', username, create_new=False, apiurl=self.apiurl)
+        url = core.make_meta_url('user', username, self.apiurl)
+
+        person = ElementTree.fromstring(''.join(data))
+        watchlist = person.find('watchlist')
+        for node in watchlist:
+            if node.get('name') == str(project):
+                watchlist.remove(node)
+                break
+
+        f = _Metafile(url, ElementTree.tostring(person))
+        f.sync()
+
+    def getRepoState(self, project):
+        targets = {}
+        tree = ElementTree.fromstring(''.join(core.show_prj_results_meta(self.apiurl, project)))
+        for result in tree.findall('result'):
+            targets[('/'.join((result.get('repository'), result.get('arch'))))] = result.get('state')
+        return targets
+
+    def getResults(self, project):
+        """getResults(project) -> (dict, list)
+
+        Get results of a project. Returns (results, targets)
+
+        results is a dict, with package names as the keys, and lists of result codes as the values
+
+        targets is a list of targets, corresponding to the result code lists
+        """
+        results = {}
+        targets = []
+        tree = ElementTree.fromstring(''.join(core.show_prj_results_meta(self.apiurl, project)))
+        for result in tree.findall('result'):
+            targets.append('/'.join((result.get('repository'), result.get('arch'))))
+            for status in result.findall('status'):
+                package = status.get('package')
+                code = status.get('code')
+                if not package in results:
+                    results[package] = []
+                results[package].append(code)
+        return (results, targets)
+
    def getDiff(self, sprj, spkg, dprj, dpkg, rev):
        """Return the server-computed diff between sprj/spkg and
        dprj/dpkg at revision *rev* (expanded, unified format).
        """
        diff = ''
        diff += core.server_diff(self.apiurl, sprj, spkg, None,
                 dprj, dpkg, rev, False, True)
        return diff
+
+    def getTargets(self, project):
+        """
+        getTargets(project) -> list
+
+        Get a list of targets for a project
+        """
+        targets = []
+        tree = ElementTree.fromstring(''.join(core.show_project_meta(self.apiurl, project)))
+        for repo in tree.findall('repository'):
+            for arch in repo.findall('arch'):
+                targets.append('%s/%s' % (repo.get('name'), arch.text))
+        return targets
+
+    def getPackageStatus(self, project, package):
+        """
+        getPackageStatus(project, package) -> dict
+
+        Returns the status of a package as a dict with targets as the keys and status codes as the
+        values
+        """
+        status = {}
+        tree = ElementTree.fromstring(''.join(core.show_results_meta(self.apiurl, project, package)))
+        for result in tree.findall('result'):
+            target = '/'.join((result.get('repository'), result.get('arch')))
+            statusnode = result.find('status')
+            code = statusnode.get('code')
+            details = statusnode.find('details')
+            if details is not None:
+                code += ': ' + details.text
+            status[target] = code
+        return status
+
+    def getProjectDiff(self, src_project, dst_project):
+        diffs = []
+
+        packages = self.getPackageList(src_project)
+        for src_package in packages:
+            diff = core.server_diff(self.apiurl,
+                                dst_project, src_package, None,
+                                src_project, src_package, None, False)
+            diffs.append(diff)
+
+        return '\n'.join(diffs)
+
+    def getPackageList(self, prj, deleted=None):
+        query = {}
+        if deleted:
+           query['deleted'] = 1
+
+        u = core.makeurl(self.apiurl, ['source', prj], query)
+        f = core.http_GET(u)
+        root = ElementTree.parse(f).getroot()
+        return [ node.get('name') for node in root.findall('entry') ]
+
    def getBinaryList(self, project, target, package):
        """
        getBinaryList(project, target, package) -> list

        Returns a list of binaries for a particular 'repository/arch'
        target and package
        """

        (repo, arch) = target.split('/')
        return core.get_binarylist(self.apiurl, project, repo, arch, package)
+
    def getBinary(self, project, target, package, file, path):
        """
        getBinary(project, target, package, file, path)

        Download binary 'file' of 'package' for 'project' and 'target'
        ('repository/arch') and save it as local file 'path'
        """

        (repo, arch) = target.split('/')
        core.get_binary_file(self.apiurl, project, repo, arch, file, target_filename=path, package=package)
+
    def getBuildLog(self, project, target, package, offset=0):
        """
        getBuildLog(project, target, package, offset=0) -> str

        Returns the build log of a package for a particular
        'repository/arch' target.

        If offset is greater than 0, return only text after that offset. This allows live streaming
        """

        (repo, arch) = target.split('/')
        # NOTE(review): the query string is embedded in the final path
        # element; confirm makeurl does not percent-encode the '?' here
        u = core.makeurl(self.apiurl, ['build', project, repo, arch, package, '_log?nostream=1&start=%s' % offset])
        return core.http_GET(u).read()
+
+    def getWorkerStatus(self):
+        """
+        getWorkerStatus() -> list of dicts
+
+        Get worker status as a list of dictionaries. Each dictionary contains the keys 'id',
+        'hostarch', and 'status'. If the worker is building, the dict will additionally contain the
+        keys 'project', 'package', 'target', and 'starttime'
+        """
+
+        url = core.makeurl(self.apiurl, ['build', '_workerstatus'])
+        f = core.http_GET(url)
+        tree = ElementTree.parse(f).getroot()
+        workerstatus = []
+        for worker in tree.findall('building'):
+            d = {'id': worker.get('workerid'),
+                 'status': 'building'}
+            for attr in ('hostarch', 'project', 'package', 'starttime'):
+                d[attr] = worker.get(attr)
+            d['target'] = '/'.join((worker.get('repository'), worker.get('arch')))
+            d['started'] = time.asctime(time.localtime(float(worker.get('starttime'))))
+            workerstatus.append(d)
+        for worker in tree.findall('idle'):
+            d = {'id': worker.get('workerid'),
+                 'hostarch': worker.get('hostarch'),
+                 'status': 'idle'}
+            workerstatus.append(d)
+        return workerstatus
+
+    def getWaitStats(self):
+        """
+        getWaitStats() -> list
+
+        Returns the number of jobs in the wait queue as a list of (arch, count)
+        pairs
+        """
+
+        url = core.makeurl(self.apiurl, ['build', '_workerstatus'])
+        f = core.http_GET(url)
+        tree = ElementTree.parse(f).getroot()
+        stats = []
+        for worker in tree.findall('waiting'):
+            stats.append((worker.get('arch'), int(worker.get('jobs'))))
+        return stats
+
+    def getSubmitRequests(self):
+        """
+        getSubmitRequests() -> list of dicts
+
+        """
+
+        url = core.makeurl(self.apiurl, ['search', 'request', '?match=submit'])
+        f = core.http_GET(url)
+        tree = ElementTree.parse(f).getroot()
+        submitrequests = []
+        for sr in tree.findall('request'):
+            if sr.get('type') != "submit":
+                continue
+
+            d = {'id': int(sr.get('id'))}
+            sb = sr.findall('submit')[0]
+            src = sb.findall('source')[0]
+            d['srcproject'] = src.get('project')
+            d['srcpackage'] = src.get('package')
+            dst = sb.findall('target')[0]
+            d['dstproject'] = dst.get('project')
+            d['dstpackage'] = dst.get('package')
+            d['state'] = sr.findall('state')[0].get('name')
+
+            submitrequests.append(d)
+        submitrequests.sort(key=lambda x: x['id'])
+        return submitrequests
+
    def rebuild(self, project, package, target=None, code=None):
        """
        rebuild(project, package, target=None, code=None)

        Rebuild 'package' in 'project' for 'target' ('repository/arch').
        When target is omitted, all targets are rebuilt; if 'code' is
        specified, only targets currently in that state are rebuilt.
        """

        if target:
            (repo, arch) = target.split('/')
        else:
            repo = None
            arch = None
        return core.rebuild(self.apiurl, project, package, repo, arch, code)
+
    def abortBuild(self, project, package=None, target=None):
        """
        abortBuild(project, package=None, target=None)

        Abort build of a package or all packages in a project; target is
        'repository/arch' and optional.
        """

        if target:
            (repo, arch) = target.split('/')
        else:
            repo = None
            arch = None
        # NOTE(review): arguments are passed (arch, repo) here while
        # rebuild() passes (repo, arch) -- verify against the signature of
        # core.abortbuild before changing
        return core.abortbuild(self.apiurl, project, package, arch, repo)
+
+    def getBuildHistory(self, project, package, target):
+        """
+        getBuildHistory(project, package, target) -> list
+
+        Get build history of package for target as a list of tuples of the form
+        (time, srcmd5, rev, versrel, bcnt)
+        """
+
+        (repo, arch) = target.split('/')
+        u = core.makeurl(self.apiurl, ['build', project, repo, arch, package, '_history'])
+        f = core.http_GET(u)
+        root = ElementTree.parse(f).getroot()
+
+        r = []
+        for node in root.findall('entry'):
+            rev = int(node.get('rev'))
+            srcmd5 = node.get('srcmd5')
+            versrel = node.get('versrel')
+            bcnt = int(node.get('bcnt'))
+            t = time.localtime(int(node.get('time')))
+            t = time.strftime('%Y-%m-%d %H:%M:%S', t)
+
+            r.append((t, srcmd5, rev, versrel, bcnt))
+        return r
+
+    def getCommitLog(self, project, package, revision=None):
+        """
+        getCommitLog(project, package, revision=None) -> list
+
+        Get commit log for package in project. If revision is set, get just the
+        log for that revision.
+
+        Each log is a tuple of the form (rev, srcmd5, version, time, user,
+        comment)
+        """
+
+        u = core.makeurl(self.apiurl, ['source', project, package, '_history'])
+        f = core.http_GET(u)
+        root = ElementTree.parse(f).getroot()
+
+        r = []
+        revisions = root.findall('revision')
+        revisions.reverse()
+        for node in revisions:
+            rev = int(node.get('rev'))
+            if revision and rev != int(revision):
+                continue
+            srcmd5 = node.find('srcmd5').text
+            version = node.find('version').text
+            user = node.find('user').text
+            try:
+                comment = node.find('comment').text
+            except:
+                comment = '<no message>'
+            t = time.localtime(int(node.find('time').text))
+            t = time.strftime('%Y-%m-%d %H:%M:%S', t)
+
+            r.append((rev, srcmd5, version, t, user, comment))
+        return r
+
+    def getProjectMeta(self, project):
+        """
+        getProjectMeta(project) -> string
+
+        Get XML metadata for project
+        """
+
+        return ''.join(core.show_project_meta(self.apiurl, project))
+
+    def getProjectData(self, project, tag):
+        """
+        getProjectData(project, tag) -> list
+        
+        Return a string list if node has text, else return the values dict list
+        """
+
+        data = []
+        tree = ElementTree.fromstring(self.getProjectMeta(project))
+        nodes = tree.findall(tag)
+        if nodes:
+            for node in nodes:
+                node_value = {}
+                for key in node.keys():
+                    node_value[key] = node.get(key)
+
+                if node_value:
+                    data.append(node_value)
+                else:
+                    data.append(node.text)
+
+        return data
+
+    def getProjectPersons(self, project, role):
+        """
+        getProjectPersons(project, role) -> list
+        
+        Return a userid list in this project with this role
+        """
+
+        userids = []
+        persons = self.getProjectData(project, 'person')
+        for person in persons:
+            if person.has_key('role') and person['role'] == role:
+                userids.append(person['userid'])
+
+        return userids
+
+    def getProjectDevel(self, project):
+        """
+        getProjectDevel(project) -> tuple (devel_prj, devel_pkg)
+
+        Return the devel tuple of a project if it has the node, else return None
+        """
+
+        devels = self.getProjectData(project, 'devel')
+        for devel in devels:
+            if devel.has_key('project') and devel.has_key('package'):
+                return (devel['project'], devel['package'])
+
+        return None
+
+    def getProjectLink(self, project):
+        """
+        getProjectLink(project) -> string
+
+        Return the linked project of a project if it has the node, else return None
+        """
+
+        links = self.getProjectData(project, 'link')
+        for link in links:
+            if link.has_key('project'):
+                return link['project']
+
+        return None
+
+    def deleteProject(self, project):
+        """
+        deleteProject(project)
+        
+        Delete the specific project
+        """
+
+        try:
+            core.delete_project(self.apiurl, project)
+        except Exception:
+            return False
+            
+        return True
+
+    def getPackageMeta(self, project, package):
+        """
+        getPackageMeta(project, package) -> string
+
+        Get XML metadata for package in project
+        """
+
+        return ''.join(core.show_package_meta(self.apiurl, project, package))
+
+    def getPackageData(self, project, package, tag):
+        """
+        getPackageData(project, package, tag) -> list
+        
+        Return a string list if node has text, else return the values dict list
+        """
+
+        data = []
+        tree = ElementTree.fromstring(self.getPackageMeta(project, package))
+        nodes = tree.findall(tag)
+        if nodes:
+            for node in nodes:
+                node_value = {}
+                for key in node.keys():
+                    node_value[key] = node.get(key)
+
+                if node_value:
+                    data.append(node_value)
+                else:
+                    data.append(node.text)
+
+        return data
+
+    def getPackagePersons(self, project, package, role):
+        """
+        getPackagePersons(project, package, role) -> list
+        
+        Return a userid list in the package with this role
+        """
+
+        userids = []
+        persons = self.getPackageData(project, package, 'person')
+        for person in persons:
+            if person.has_key('role') and person['role'] == role:
+                userids.append(person['userid'])
+
+        return userids
+
+    def getPackageDevel(self, project, package):
+        """
+        getPackageDevel(project, package) -> tuple (devel_prj, devel_pkg)
+        
+        Return the devel tuple of a package if it has the node, else return None
+        """
+
+        devels = self.getPackageData(project, package, 'devel')
+        for devel in devels:
+            if devel.has_key('project') and devel.has_key('package'):
+                return (devel['project'], devel['package'])
+
+        return None
+
+    def deletePackage(self, project, package):
+        """
+        deletePackage(project, package)
+        
+        Delete the specific package in project
+        """
+
+        try:
+            core.delete_package(self.apiurl, project, package)
+        except Exception:
+            return False
+            
+        return True
+
    def projectFlags(self, project):
        """
        projectFlags(project) -> _ProjectFlags

        Return a _ProjectFlags object for manipulating the flags of project.
        (_ProjectFlags is defined elsewhere in this module.)
        """

        return _ProjectFlags(self, project)
+
    def checkout(self, prj, pkg, rev='latest'):
        """Check out the package into the current dir, with link expanded.

        prj/pkg: project and package to fetch (checked out under ./<prj>/)
        rev: revision to check out, default 'latest'
        """

        core.checkout_package(self.apiurl, prj, pkg, rev, prj_dir=prj, expand_link=True)
+
+    def findPac(self, wd='.'):
+        """Get the single Package object for specified dir
+          the 'wd' should be a working dir for one single pac
+        """
+
+        if core.is_package_dir(wd):
+            return core.findpacs([wd])[0]
+        else:
+            return None
+
    def mkPac(self, prj, pkg):
        """Create empty package for new one under CWD.

        Registers the package on the server, wipes any stale local copy,
        then creates a fresh osc package dir under ./<prj>/<pkg>.
        """

        core.make_dir(self.apiurl, prj, pkg, pathname = '.')

        # Remove any leftover local checkout before re-creating it.
        pkg_path = os.path.join(prj, pkg)
        shutil.rmtree(pkg_path, ignore_errors = True)
        # NOTE(review): this chdir is never undone, so the process CWD is
        # left inside 'prj' after the call -- confirm callers rely on that.
        os.chdir(prj)
        core.createPackageDir(pkg)
+
    def submit(self, msg, wd='.'):
        """Commit the package working dir 'wd' to the server with log 'msg'.

        Silently returns when 'wd' is not a package working directory.
        Raises ObsError when the server rejects the commit.
        """
        if not core.is_package_dir(wd):
            # TODO show some error message
            return

        pac = core.findpacs([wd])[0]
        # Project dir is the parent of the package dir.
        prj = os.path.normpath(os.path.join(pac.dir, os.pardir))
        pac_path = os.path.basename(os.path.normpath(pac.absdir))
        files = {}
        files[pac_path] = pac.todo
        try:
            core.Project(prj).commit(tuple([pac_path]), msg=msg, files=files)
        except urllib2.HTTPError, e:
            raise errors.ObsError('%s' % e)
        # Drop the stored commit message now that the commit succeeded.
        core.store_unlink_file(pac.absdir, '_commit_msg')
+
+    def branchPkg(self, src_project, src_package, rev=None, target_project=None, target_package=None):
+        """Create branch package from `src_project/src_package`
+          arguments:
+            rev: revision of src project/package
+            target_project: name of target proj, use default one if None
+            target_package: name of target pkg, use the same as asrc if None
+        """
+
+        if target_project is None:
+            target_project = 'home:%s:branches:%s' \
+                             % (conf.get_apiurl_usr(self.apiurl), src_project)
+
+        if target_package is None:
+            target_package = src_package
+
+        exists, targetprj, targetpkg, srcprj, srcpkg = \
+            core.branch_pkg(self.apiurl,
+                            src_project,
+                            src_package,
+                            rev=rev,
+                            target_project=target_project,
+                            target_package=target_package,
+                            force=True)
+
+        return (targetprj, targetpkg)
+
    def get_buildconfig(self, prj, repository):
        # Thin wrapper: fetch the build configuration of 'repository' in 'prj'.
        return core.get_buildconfig(self.apiurl, prj, repository)
+
+    def get_ArchitectureList(self, prj, target):
+        """
+        return the list of Archictecture of the target of the projectObsName for a OBS server.
+        """
+        url = core.makeurl(self.apiurl,['build', prj, target])
+        f = core.http_GET(url)
+        if f == None:
+            return None
+
+        aElement = ElementTree.fromstring(''.join(f.readlines()))
+        result = []
+        for directory in aElement:
+            for entry in directory.getiterator():
+                result.append(entry.get("name"))
+
+        return result
+
    def get_results(self, prj, package):
        """Return {repository: {arch: status}} for 'package' in 'prj'.

        Network/SSL failures are re-raised as ObsError.
        """
        try:
            results = defaultdict(dict)
            build_status = core.get_results(self.apiurl, prj, package)
            # Each entry is a whitespace-separated "repo arch status" line.
            for res in build_status:
                repo, arch, status = res.split()
                results[repo][arch] = status
            return results
        except (M2Crypto.m2urllib2.URLError, M2Crypto.SSL.SSLError), err:
            raise errors.ObsError(str(err))
+
    def get_buildlog(self, prj, package, repository, arch, offset = 0):
        """prints out the buildlog on stdout

        NOTE(review): 'offset' is accepted but never used -- confirm intent.
        """
        # Python 2 str.translate trick: the full 256-byte identity table plus
        # a delete-set of control bytes 0-9 and 11-31, i.e. newline (10) is
        # kept so line structure survives.
        all_bytes = string.maketrans('', '')
        remove_bytes = all_bytes[:10] + all_bytes[11:32]
        try:
            log = self.getBuildLog(prj, '%s/%s' % (repository, arch), package)
            sys.stdout.write(log.translate(all_bytes, remove_bytes))
        except (M2Crypto.m2urllib2.URLError, M2Crypto.SSL.SSLError), err:
            raise errors.ObsError(str(err))
diff --git a/envparas.py b/envparas.py
new file mode 100644 (file)
index 0000000..df3981c
--- /dev/null
@@ -0,0 +1,47 @@
#!/usr/bin/python -tt
# vim: ai ts=4 sts=4 et sw=4

"""Populate module-level globals from Jenkins/Gerrit/OBS environment
variables.

Every name listed in ``envargs`` becomes a module attribute whose value is
the corresponding environment variable, or None when the variable is unset.
"""

import os

envargs = ['OBS_DEST_PROJECT',
           'OBS_STAGING_PROJECT',
           'POLICY_MAPPING',
           'GERRIT_BRANCH',
           'GERRIT_CHANGE_NUMBER',
           'GERRIT_CHANGE_ID',
           'GERRIT_PATCHSET_NUMBER',
           'GERRIT_PATCHSET_REVISION',
           'GERRIT_REFSPEC',
           'GERRIT_PROJECT',
           'GERRIT_CHANGE_SUBJECT',
           'GERRIT_CHANGE_URL',
           'GERRIT_CHANGE_OWNER',
           'GERRIT_CHANGE_OWNER_NAME',
           'GERRIT_CHANGE_OWNER_EMAIL',
           'GERRIT_PATCHSET_UPLOADER',
           'GERRIT_PATCHSET_UPLOADER_NAME',
           'GERRIT_PATCHSET_UPLOADER_EMAIL',
           'GERRIT_CHANGE_SUBMITTER',
           'GERRIT_CHANGE_SUBMITTER_NAME',
           'GERRIT_CHANGE_SUBMITTER_EMAIL',
           'GERRIT_HOSTNAME',
           'GERRIT_USERNAME',
           'GERRIT_SSHPORT',
           'GERRIT_EVENT_TYPE',
           'OBS_API_URL',
           'OBS_USERNAME',
           'OBS_OSCRC_PATH',
           'MYSQL_HOSTNAME',
           'MYSQL_PORT', 
           'MYSQL_USERNAME',
           'MYSQL_PASSWORD',
           'MYSQL_DB_NAME',
           'MAPPING_PROJECT']

# Restrict 'from envparas import *' to exactly these names.
__all__ = envargs

# Bind each variable as a module-level global.  Assigning through globals()
# replaces the original exec() of an assembled statement string: same
# effect, but no string evaluation.
for element in envargs:
    globals()[element] = os.getenv(element)
diff --git a/errors.py b/errors.py
new file mode 100644 (file)
index 0000000..028e16b
--- /dev/null
+++ b/errors.py
@@ -0,0 +1,55 @@
+#!/usr/bin/python -tt
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2011 Intel, Inc.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 59
+# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
class CmdError(Exception):
    """Base class for all command calling errors.

    Subclasses set 'keyword' to a short tag that prefixes the rendered
    message.
    """
    keyword = ''

    def __str__(self):
        return '%s%s' % (self.keyword, self.args[0])

class Usage(CmdError):
    """Raised for command-line usage mistakes."""
    keyword = '<usage>'

    def __str__(self):
        return '%s%s, please use "--help" for more info' \
               % (self.keyword, self.args[0])
+
class ConfigError(CmdError):
    """Configuration reading/parsing error."""
    keyword = '<config>'

class ObsError(CmdError):
    """Error talking to an OBS server."""
    keyword = '<obs>'

class GitError(CmdError):
    """Error running or parsing a git operation."""
    keyword = '<git>'

class UnpackError(CmdError):
    """Error unpacking an archive."""
    keyword = '<unpack>'

class FormatError(CmdError):
    """Malformed input/data format."""
    keyword = '<format>'

class QemuError(CmdError):
    """Error in qemu-based emulation."""
    keyword = '<qemu>'

class Abort(CmdError):
    """User-requested or fatal abort; rendered without a tag."""
    keyword = ''

class UrlError(CmdError):
    """Error fetching a URL."""
    keyword = '<urlgrab>'
diff --git a/git.py b/git.py
new file mode 100644 (file)
index 0000000..1383bbe
--- /dev/null
+++ b/git.py
@@ -0,0 +1,361 @@
+#!/usr/bin/python -tt
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2011 Intel, Inc.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 59
+# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+import os
+import re
+
+# internal modules
+import runner
+import errors
+import msger
+from utils import Workdir
+
class Git:
    """Thin wrapper around the git command line for one local repository."""

    def __init__(self, path):
        """path: directory that must contain a '.git' subdirectory."""
        if not os.path.isdir(os.path.join(path, '.git')):
            # errors.py defines GitError but no GitInvalid, so the original
            # 'raise errors.GitInvalid(path)' itself crashed with
            # AttributeError instead of reporting the real problem.
            raise errors.GitError('%s is not a git repository' % path)

        self.path = os.path.abspath(path)
        self._git_dir = os.path.join(path, '.git')

        # cached results of _get_branches()
        self.cur_branch = None
        self.branches = None
+
+    def _is_sha1(self, val):
+        sha1_re = re.compile(r'[0-9a-f]{40}$')
+        return True if sha1_re.match(val) else False
+
    def _exec_git(self, command, args=[]):
        """Run 'git <command> <args...>' inside the repository.

        Returns (exit_status, output).  The [] default is safe here since
        'args' is only concatenated, never mutated.
        """

        cmd = ['git', command] + args

        cmdln = ' '.join(cmd)
        msger.debug('run command: %s' % cmdln)

        with Workdir(self.path):
            ret, outs = runner.runtool(cmdln)

        # Deliberately no raise on failure: callers inspect 'ret' themselves.
        #if ret:
        #    raise errors.GitError("command error for: %s" % cmdln)

        return ret, outs
+
+    def status(self, *args):
+        ret, outs = self._exec_git('status', ['-s'] + list(args))
+
+        sts = {}
+        for line in outs.splitlines():
+            st = line[:2]
+            if st not in sts:
+                sts[st] = [line[2:].strip()]
+            else:
+                sts[st].append(line[2:].strip())
+
+        return sts
+
+    def ls_files(self):
+        """Return the files list
+        """
+        return filter(None, self._exec_git('ls-files')[1].splitlines())
+
+    def rev_parse(self, name):
+        """ Find the SHA1 of a given name commit id"""
+        options = [ "--quiet", "--verify", name ]
+        cmd = ['git', 'rev-parse']
+        ret, commit = runner.runtool(' '.join(cmd + options))
+        if ret == 0:
+            return commit.strip()
+        else:
+            return None
+
+    def create_branch(self, branch, rev=None):
+        if rev and not self._is_sha1(rev):
+            rev = self.rev_parse(rev)
+        if not branch:
+            raise errors.GitError('Branch name should not be None')
+
+        options = [branch, rev, '-f']
+        self._exec_git('branch', options)
+
+    def _get_branches(self):
+        """Return the branches list, current working branch is the first
+        element.
+        """
+        branches = []
+        for line in self._exec_git('branch', ['--no-color'])[1].splitlines():
+            br = line.strip().split()[-1]
+
+            if line.startswith('*'):
+                current_branch = br
+
+            branches.append(br)
+
+        return (current_branch, branches)
+
+    def get_branches(self):
+        if not self.cur_branch or not self.branches:
+            self.cur_branch, self.branches = \
+                self._get_branches()
+
+        return (self.cur_branch, self.branches)
+
+    def is_clean(self):
+        """does the repository contain any uncommitted modifications"""
+
+        gitsts = self.status()
+        if 'M ' in gitsts or ' M' in gitsts or \
+           'A ' in gitsts or ' A' in gitsts or \
+           'D ' in gitsts or ' D' in gitsts:
+            return False
+        else:
+            return True
+
+    def has_branch(self, br, remote=False):
+        """Check if the repository has branch 'br'
+          @param remote: only liste remote branches
+        """
+
+        if remote:
+            options = [ '--no-color', '-r' ]
+
+            for line in self._exec_git('branch', options)[1].splitlines():
+                rbr = line.strip().split()[-1]
+                if br == rbr:
+                    return True
+
+            return False
+
+        else:
+            return (br in self.get_branches()[1])
+
+    def checkout(self, br):
+        """checkout repository branch 'br'
+        """
+        options = [br]
+        with Workdir(self.path):
+            self._exec_git('checkout', options)
+
    def clean_branch(self, br):
        """Clean up repository branch 'br'
        """

        options = ['-dfx']
        with Workdir(self.path):
            self.checkout(br)
            # Drop the index file so the following 'git clean -dfx' removes
            # everything untracked relative to HEAD.  NOTE(review): this also
            # discards any staged state on 'br' -- confirm that is intended.
            runner.quiet('rm .git/index')
            self._exec_git('clean', options)
+
    def commit_dir(self, unpack_dir, msg, branch = 'master', other_parents=None,
                   author={}, committer={}, create_missing_branch=False):
        """Commit the content of 'unpack_dir' as a new commit, message 'msg'.

        Returns the new commit id, or None when the tree was unchanged.
        The {} defaults are safe: both dicts are only iterated, never
        mutated.  NOTE(review): 'branch', 'other_parents' and
        'create_missing_branch' are accepted but never used here -- confirm
        whether that is intentional.
        """

        # Export author/committer identity through the env vars git reads.
        for key, val in author.items():
            if val:
                os.environ['GIT_AUTHOR_%s' % key.upper()] = val
        for key, val in committer.items():
            if val:
                os.environ['GIT_COMMITTER_%s' % key.upper()] = val

        # Point git's work tree at the unpacked content and stage everything.
        os.environ['GIT_WORK_TREE'] = unpack_dir
        options = ['.', '-f']
        self._exec_git("add", options)

        changed = not self.is_clean()
        if changed:
            options = ['--quiet','-a', '-m %s' % msg,]
            self._exec_git("commit", options)

        # First field of 'git log --oneline -1' is the new (abbreviated) id.
        commit_id = self._exec_git('log', ['--oneline', '-1'])[1].split()[0]

        # Undo the environment changes made above.
        del os.environ['GIT_WORK_TREE']
        for key, val in author.items():
            if val:
                del os.environ['GIT_AUTHOR_%s' % key.upper()]
        for key, val in committer.items():
            if val:
                del os.environ['GIT_COMMITTER_%s' % key.upper()]

        # Sync the real work tree with the commit just created.
        self._exec_git('reset', ['--hard', commit_id])

        return commit_id if changed else None
+
+    def find_tag(self, tag):
+        """find the specify version from the repository"""
+        args = ['-l', tag]
+        ret = self._exec_git('tag', args)[1]
+        if ret:
+            return True
+        return False
+
+    def commits(self, since=None, until=None, paths=None, options=None,
+                first_parent=False):
+        """
+        get commits from since to until touching paths
+
+        @param options: list of options past to git log
+        @type  options: list of strings
+        """
+
+        args = ['--pretty=format:%H']
+
+        if options:
+            args += options
+
+        if first_parent:
+            args += [ "--first-parent" ]
+
+        if since and until:
+            assert(self.rev_parse(since))
+            assert(self.rev_parse(until))
+            args += ['%s..%s' % (since, until)]
+
+        if paths:
+            args += [ "--", paths ]
+
+        commits = self._exec_git('log', args)[1]
+
+        return [ commit.strip() for commit in commits.split('\n') ]
+
+    def get_config(self, name):
+        """Gets the config value associated with name"""
+        return self._exec_git('config', [ name ] )[1][:-1]
+
+    def get_commit_info(self, commit):
+        """Given a commit name, return a dictionary of its components,
+        including id, author, email, subject, and body."""
+        out = self._exec_git('log',
+                             ['--pretty=format:%h%n%an%n%ae%n%s%n%b%n',
+                              '-n1', commit])[1].split('\n')
+        return {'id' : out[0].strip(),
+                'author' : out[1].strip(),
+                'email' : out[2].strip(),
+                'subject' : out[3].rstrip(),
+                'body' : [line.rstrip() for line in  out[4:]]}
+
+    def create_tag(self, name, msg, commit):
+        """Creat a tag with name at commit""" 
+        if self.rev_parse(commit) is None:
+            raise errors.GitError('%s is invalid commit ID' % commit)
+        options = [name, '-m %s' % msg, commit]
+        self._exec_git('tag', options)
+
+    def merge(self, commit):
+        """ merge the git tree specified by commit to current branch"""
+        if self.rev_parse(commit) is None and not self.find_tag(commit):
+            raise errors.GitError('%s is invalid commit ID or tag' % commit)
+
+        options = [commit]
+        self._exec_git('merge', options)
+
+    @staticmethod
+    def _formatlize(version):
+        return version.replace('~', '_').replace(':', '%')
+
+    @staticmethod
+    def version_to_tag(format, version):
+        return format % dict(version=Git._formatlize(version))
+
    @classmethod
    def create(klass, path, description=None, bare=False):
        """
        Create a repository at path
        @path: where to create the repository

        Returns a Git instance for the new repository; raises GitError when
        an OS-level operation fails.
        """
        abspath = os.path.abspath(path)
        options = []
        if bare:
            options = [ '--bare' ]
            git_dir = ''   # bare repo: 'description' sits at the repo root
        else:
            options = []
            git_dir = '.git'

        try:
            if not os.path.exists(abspath):
                os.makedirs(abspath)

            with Workdir(abspath):
                cmd = ['git', 'init'] + options;
                runner.quiet(' '.join(cmd))
            if description:
                # file() is the Python 2 spelling of open()
                with file(os.path.join(abspath, git_dir, "description"), 'w') as f:
                    # Ensure the description ends with exactly one newline.
                    description += '\n' if description[-1] != '\n' else ''
                    f.write(description)
            return klass(abspath)
        except OSError, err:
            # err[1] is the strerror field (Python 2 exception indexing)
            raise errors.GitError("Cannot create Git repository at '%s': %s"
                                     % (abspath, err[1]))
        return None  # NOTE(review): unreachable -- every path above returns/raises
+
+    def show(self, *args):
+        """show commit details
+        """
+        with Workdir(self.path):
+            ret, outs = self._exec_git('show', list(args))
+            if not ret:
+                return outs.splitlines()
+
+    def fetch(self, *args):
+        """Download objects and refs from another repository
+        """
+        with Workdir(self.path):
+            self._exec_git('fetch', list(args))
+
+    def describe(self, *args):
+        """Show the most recent tag that is reachable from a commit
+        """
+        with Workdir(self.path):
+            ret, outs = self._exec_git('describe', list(args))
+            if not ret:
+                return outs
+
+    @staticmethod
+    def clone(workdir, *args):
+        """Clone a repository into a new directory
+        """
+        with Workdir(workdir):
+            self._exec_git('clone', args)
+
+    def pull(self, *args):
+        """Fetch from and merge with another repository or a local branch
+        """
+        with Workdir(self.path):
+            ret, outs = self._exec_git('pull', list(args))
+            if ret:
+                return False
+            else:
+                return True
+
    def clean(self, *args):
        """Run 'git clean' with the given arguments (e.g. '-dfx')."""
        with Workdir(self.path):
            self._exec_git('clean', list(args))
+
+    def get_base_commit(self, commit):
+        out = self._exec_git('log',
+                             ['--pretty=format:%H',
+                              '-n2', commit])[1].split('\n')
+        return out[1].strip()
+
+    def format_patch(self, *args):
+        with Workdir(self.path):
+            ret, outs = self._exec_git('format-patch', list(args))
+        print 'format_patch',ret, outs, args
+        if not ret:
+            return outs.splitlines()
diff --git a/msger.py b/msger.py
new file mode 100644 (file)
index 0000000..cd6ef23
--- /dev/null
+++ b/msger.py
@@ -0,0 +1,449 @@
+#!/usr/bin/python -tt
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2009, 2010, 2011 Intel, Inc.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 59
+# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+import os, sys
+import re
+import time
+
+__ALL__ = ['set_mode',
+           'get_loglevel',
+           'set_loglevel',
+           'set_logfile',
+           'enable_logstderr',
+           'disable_logstderr',
+           'raw',
+           'debug',
+           'verbose',
+           'info',
+           'warning',
+           'error',
+           'ask',
+           'pause',
+           'waiting',
+           'PrintBuf',
+           'PrintBufWrapper',
+          ]
+
+# COLORs in ANSI
+INFO_COLOR = 32 # green
+WARN_COLOR = 33 # yellow
+ERR_COLOR  = 31 # red
+ASK_COLOR  = 34 # blue
+NO_COLOR = 0
+
+# save the timezone info at import time
+HOST_TIMEZONE = time.timezone
+
+PREFIX_RE = re.compile('^<(.*?)>\s*(.*)', re.S)
+
+INTERACTIVE = True
+
+LOG_LEVEL = 1
+LOG_LEVELS = {
+                'quiet': 0,
+                'normal': 1,
+                'verbose': 2,
+                'debug': 3,
+                'never': 4,
+             }
+
+LOG_FILE_FP = None
+LOG_CONTENT = ''
+CATCHERR_BUFFILE_FD = -1
+CATCHERR_BUFFILE_PATH = None
+CATCHERR_SAVED_2 = -1
+
+# save the orignal stdout/stderr at the very start
+STDOUT = sys.stdout
+STDERR = sys.stderr
+
+# Configure gbp logging
+import gbp.log
+gbp.log.logger.format = '%(color)s%(levelname)s: %(coloroff)s%(message)s'
+
+# Mapping for gbs->gbp log levels
+GBP_LOG_LEVELS = {
+                    'quiet': gbp.log.Logger.ERROR,
+                    'normal': gbp.log.Logger.INFO,
+                    'verbose': gbp.log.Logger.DEBUG,
+                    'debug': gbp.log.Logger.DEBUG,
+                    'never': gbp.log.Logger.ERROR
+                 }
+
class PrintBuf(object):
    """Capture text written through `print` by temporarily swapping
    sys.stdout / sys.stderr for in-memory buffers.
    """

    def __init__(self):
        self.buf1 = None
        self.buf2 = None
        self.old1 = None
        self.old2 = None

    def start(self):
        """Begin capturing; a no-op while the log level is 'debug'."""
        if get_loglevel() == 'debug':
            return
        import StringIO
        self.buf1 = StringIO.StringIO()
        self.buf2 = StringIO.StringIO()

        self.old1, self.old2 = sys.stdout, sys.stderr
        sys.stdout, sys.stderr = self.buf1, self.buf2

    def stop(self):
        """Restore the real streams and return the captured text as a
        (stdout, stderr) tuple, both stripped; ('', '') if not capturing.
        """
        if not self.buf1:
            return ('', '')

        captured = (self.buf1.getvalue().strip(),
                    self.buf2.getvalue().strip())
        self.buf1.close()
        self.buf2.close()

        sys.stdout, sys.stderr = self.old1, self.old2
        self.buf1 = self.buf2 = self.old1 = self.old2 = None
        return captured
+
class PrintBufWrapper(object):
    """Wrap another class, redirecting whatever its constructor and
    methods print to the given callback functions.
    """

    def __init__(self, wrapped_class, msgfunc_1, msgfunc_2, *args, **kwargs):
        """Arguments:
          wrapped_class: the class to be wrapped
          msgfunc_1: function to deal with msg from stdout(1)
          msgfunc_2: function to deal with msg from stderr(2)
          *args, **kwargs: the original args of wrapped_class
        """

        self.pbuf = PrintBuf()
        self.func1 = msgfunc_1
        self.func2 = msgfunc_2

        self.pbuf.start()
        # fix: the original did not restore stdout/stderr if the wrapped
        # constructor raised -- the try/finally guarantees it now
        try:
            self.wrapped_inst = wrapped_class(*args, **kwargs)
        finally:
            self._flush()

    def _flush(self):
        # stop capturing and forward anything that was printed
        stdout_msg, stderr_msg = self.pbuf.stop()
        if stdout_msg:
            self.func1(stdout_msg)
        if stderr_msg:
            self.func2(stderr_msg)

    def __getattr__(self, attr):
        orig_attr = getattr(self.wrapped_inst, attr)
        if not callable(orig_attr):
            return orig_attr

        def hooked(*args, **kwargs):
            # capture around the real call; the pointless
            # `except: raise` of the original is gone -- finally suffices
            self.pbuf.start()
            try:
                return orig_attr(*args, **kwargs)
            finally:
                self._flush()

        return hooked
+
+def _general_print(head, color, msg = None, stream = None, level = 'normal'):
+    """Core print routine: honor LOG_LEVEL, drain any stderr captured by
+    enable_logstderr(), and mirror output into LOG_CONTENT when a log
+    file was configured via set_logfile().
+    """
+    global LOG_CONTENT
+
+    if LOG_LEVELS[level] > LOG_LEVEL:
+        # skip
+        return
+
+    if stream is None:
+        stream = STDOUT
+
+    errormsg = ''
+    if CATCHERR_BUFFILE_FD > 0:
+        # stderr is currently redirected into a buffer file: read what
+        # accumulated there and truncate it back to empty
+        size = os.lseek(CATCHERR_BUFFILE_FD , 0, os.SEEK_END)
+        os.lseek(CATCHERR_BUFFILE_FD, 0, os.SEEK_SET)
+        errormsg = os.read(CATCHERR_BUFFILE_FD, size)
+        os.ftruncate(CATCHERR_BUFFILE_FD, 0)
+
+    if LOG_FILE_FP:
+        if errormsg:
+            LOG_CONTENT += errormsg
+
+        if msg and msg.strip():
+            # render host-local time using the offset saved at import
+            timestr = time.strftime("[%m/%d %H:%M:%S] ",
+                                    time.gmtime(time.time() - HOST_TIMEZONE))
+            LOG_CONTENT += timestr + msg.strip() + '\n'
+
+    if errormsg:
+        # show the drained stderr text first, uncolored
+        _color_print('', NO_COLOR, errormsg, stream, level)
+
+    _color_print(head, color, msg, stream, level)
+
+def _color_print(head, color, msg, stream, _level):
+    """Low-level writer: emit `head` (ANSI-colored when appropriate)
+    followed by `msg` to `stream`.
+    """
+    colored = True
+    if color == NO_COLOR or \
+       not stream.isatty() or \
+       os.getenv('ANSI_COLORS_DISABLED') is not None:
+        colored = False
+
+    if head.startswith('\r'):
+        # need not \n at last
+        newline = False
+    else:
+        newline = True
+
+    if colored:
+        head = '\033[%dm%s:\033[0m ' % (color, head)
+        if not newline:
+            # ESC cmd to clear line
+            head = '\033[2K' + head
+    else:
+        if head:
+            head += ': '
+            if head.startswith('\r'):
+                # '\r'-prefixed head on a non-tty: drop the CR and fall
+                # back to a normal newline-terminated line
+                head = head.lstrip()
+                newline = True
+
+    if msg is not None:
+        stream.write('%s%s' % (head, msg))
+        if newline:
+            stream.write('\n')
+
+    stream.flush()
+
def _color_perror(head, color, msg, level = 'normal'):
    """Error-channel printer: while fd 2 is being captured, write to
    stdout instead so the message is not swallowed by the buffer file.
    """
    if CATCHERR_BUFFILE_FD > 0:
        target = STDOUT
    else:
        target = STDERR
    _general_print(head, color, msg, target, level)
+
def _split_msg(head, msg):
    """Normalize a (head, msg) pair: join list messages with newlines,
    migrate a leading '\\n' or '\\r' from msg onto head, and extract an
    optional '<prefix>' marker from the start of msg into head.
    """
    if isinstance(msg, list):
        msg = '\n'.join([str(item) for item in msg])

    for lead in ('\n', '\r'):
        if msg.startswith(lead):
            # move the control character in front of the head instead
            msg = msg.lstrip()
            head = lead + head
            break

    matched = PREFIX_RE.match(msg)
    if matched:
        head = '%s <%s>' % (head, matched.group(1))
        msg = matched.group(2)

    return head, msg
+
def get_loglevel():
    """Return the symbolic name of the current log level (e.g. 'normal').

    fix: the original used `(generator).next()`, a Python-2-only call
    and a needlessly obscure way to do a reverse dict lookup.
    """
    for name, number in LOG_LEVELS.items():
        if number == LOG_LEVEL:
            return name
+
def set_loglevel(level):
    """Set the logging threshold by name; unknown names are ignored."""
    global LOG_LEVEL
    if level in LOG_LEVELS:
        LOG_LEVEL = LOG_LEVELS[level]
        # keep git-buildpackage's logger in sync with ours
        gbp.log.logger.set_level(GBP_LOG_LEVELS[level])
+
+def set_interactive(mode=True):
+    global INTERACTIVE
+    if mode:
+        INTERACTIVE = True
+    else:
+        INTERACTIVE = False
+
+def raw(msg=''):
+    # print the message verbatim: no prefix, no color
+    _general_print('', NO_COLOR, msg)
+
+def info(msg):
+    head, msg = _split_msg('info', msg)
+    _general_print(head, INFO_COLOR, msg)
+
+def verbose(msg):
+    # only shown at 'verbose' log level or above
+    head, msg = _split_msg('verbose', msg)
+    _general_print(head, INFO_COLOR, msg, level = 'verbose')
+
+def warning(msg):
+    head, msg = _split_msg('warn', msg)
+    _color_perror(head, WARN_COLOR, msg)
+
+def debug(msg):
+    # only shown at 'debug' log level
+    head, msg = _split_msg('debug', msg)
+    _color_perror(head, ERR_COLOR, msg, level = 'debug')
+
+def error(msg):
+    # prints the message, then terminates the process
+    head, msg = _split_msg('error', msg)
+    _color_perror(head, ERR_COLOR, msg)
+    sys.exit(1)
+
def waiting(func):
    """Function decorator to show a simple waiting indicator (dots on
    stderr, one per second) while a long operation runs.

    fix: dropped the original's no-op `except: raise` -- a plain
    try/finally already re-raises and still stops the timer.
    """

    import functools

    @functools.wraps(func)
    def _wait_with_print(*args, **kwargs):
        import threading

        class _WaitingTimer(threading.Thread):
            def __init__(self):
                threading.Thread.__init__(self)
                self.event = threading.Event()
                self.waited = False  # whether any dot was printed

            def run(self):
                while not self.event.is_set():
                    # wait first so fast operations print nothing
                    self.event.wait(1)
                    if self.event.is_set():
                        break

                    self.waited = True
                    STDERR.write('.')
                    STDERR.flush()

            def stop(self):
                self.event.set()

                if self.waited:
                    # finish the line of dots
                    STDERR.write('\n')
                    STDERR.flush()

        timer = _WaitingTimer()
        timer.start()

        try:
            return func(*args, **kwargs)
        finally:
            timer.stop()

    return _wait_with_print
+
+def ask(msg, default=True):
+    """Prompt a yes/no question, return the answer as a bool.
+
+    In non-interactive mode `default` is echoed and returned without
+    prompting.  Ctrl-C exits the process with status 2.
+    """
+    _general_print('\rQ', ASK_COLOR, '')
+    try:
+        if default:
+            msg += '(Y/n) '
+        else:
+            msg += '(y/N) '
+        if INTERACTIVE:
+            while True:
+                repl = raw_input(msg)
+                if repl.lower() == 'y':
+                    return True
+                elif repl.lower() == 'n':
+                    return False
+                elif not repl.strip():
+                    # <Enter>
+                    return default
+
+                # else loop
+        else:
+            if default:
+                msg += ' Y'
+            else:
+                msg += ' N'
+            _general_print('', NO_COLOR, msg)
+
+            return default
+    except KeyboardInterrupt:
+        sys.stdout.write('\n')
+        sys.exit(2)
+
+def pause(msg=None):
+    """Block until the user presses <ENTER>; no-op when non-interactive."""
+    if INTERACTIVE:
+        _general_print('\rQ', ASK_COLOR, '')
+        if msg is None:
+            msg = 'press <ENTER> to continue ...'
+        raw_input(msg)
+
+def set_logfile(fpath):
+    """Arrange for accumulated log content to be written to `fpath`
+    when the process exits.
+    """
+    global LOG_FILE_FP
+
+    def _savelogf():
+        # atexit hook: dump LOG_CONTENT to the configured log file,
+        # creating the parent directory if needed
+        if LOG_FILE_FP:
+            if not os.path.exists(os.path.dirname(LOG_FILE_FP)):
+                os.makedirs(os.path.dirname(LOG_FILE_FP))
+            fhandle = open(LOG_FILE_FP, 'w')
+            fhandle.write(LOG_CONTENT)
+            fhandle.close()
+
+    if LOG_FILE_FP is not None:
+        # NOTE(review): a second call still overwrites the path and
+        # registers another atexit hook -- only warned about here
+        warning('duplicate log file configuration')
+
+    LOG_FILE_FP = os.path.abspath(os.path.expanduser(fpath))
+
+    import atexit
+    atexit.register(_savelogf)
+
+def enable_logstderr(fpath):
+    """Redirect fd 2 (stderr) into the buffer file `fpath`; captured
+    text is drained and shown by the next _general_print() call.
+    """
+    global CATCHERR_BUFFILE_FD
+    global CATCHERR_BUFFILE_PATH
+    global CATCHERR_SAVED_2
+
+    if os.path.exists(fpath):
+        os.remove(fpath)
+    CATCHERR_BUFFILE_PATH = fpath
+    CATCHERR_BUFFILE_FD = os.open(CATCHERR_BUFFILE_PATH, os.O_RDWR|os.O_CREAT)
+    # remember the real stderr so disable_logstderr() can restore it
+    CATCHERR_SAVED_2 = os.dup(2)
+    os.dup2(CATCHERR_BUFFILE_FD, 2)
+
+def disable_logstderr():
+    """Undo enable_logstderr(): flush pending captured text, restore the
+    real fd 2 and delete the temporary buffer file.
+    """
+    global CATCHERR_BUFFILE_FD
+    global CATCHERR_BUFFILE_PATH
+    global CATCHERR_SAVED_2
+
+    raw(msg=None) # flush message buffer and print it
+    os.dup2(CATCHERR_SAVED_2, 2)
+    os.close(CATCHERR_SAVED_2)
+    os.close(CATCHERR_BUFFILE_FD)
+    os.unlink(CATCHERR_BUFFILE_PATH)
+    CATCHERR_BUFFILE_FD = -1
+    CATCHERR_BUFFILE_PATH = None
+    CATCHERR_SAVED_2 = -1
index 8dc3801..fe4f4c6 100644 (file)
--- a/mysql.py
+++ b/mysql.py
@@ -15,6 +15,7 @@ class Database:
 
     def __del__(self):
         """ close db"""
+        self.cur.close()
         self.conn.close()
 
     def update(self, table, data, condition):
@@ -28,6 +29,7 @@ class Database:
         sql = ''' update %s set %s %s''' %(table, set_string, cond)
 
         self.cur.execute(sql)
+        self.conn.commit()
 
     def insert(self, table, values={}):
         column_list = []
@@ -39,6 +41,7 @@ class Database:
         sql = ''' insert into %s (%s) values (%s)''' %(table, ','.join(column_list), ','.join(["\'%s\'" %s for s in value_list]))
 
         self.cur.execute(sql)
+        self.conn.commit()
 
     def create_table(self, table, columns):
 
@@ -78,17 +81,3 @@ class Database:
 
         self.cur.execute(sql)
         return self.cur.fetchall()
-
-
-
-db = Database('mrrs.bj.intel.com', 'mrrs', 'mrrs', 'changes_testing')
-
-print db.create_table('testing', ['name CHAR(30)', 'email CHAR(90)'])
-
-print db.columns('testing')
-
-print db.insert('testing', {'name':'ttt', 'email':'emmmm@intel.com'})
-
-print db.select('testing', ['*'])
-
-print db.update('testing', {'email':'123@example.com'}, {'name':'ttt'})
diff --git a/obspkg.py b/obspkg.py
new file mode 100644 (file)
index 0000000..4fd6123
--- /dev/null
+++ b/obspkg.py
@@ -0,0 +1,217 @@
+#!/usr/bin/python -tt
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2012 Intel, Inc.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 59
+# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+from __future__ import with_statement
+import os
+import shutil
+import buildservice
+import runner
+import msger
+import errors
+from utils import Workdir
+
+class ObsPackage(object):
+    """ Wrapper class of local package dir of OBS
+    """
+
+    def __init__(self, basedir, prj, pkg, dst_prj=None, dst_pkg=None, apiurl=None, oscrc=None):
+        """Arguments:
+          basedir: the base local dir to store obs packages
+          prj: obs project
+          pkg: obs package
+          apiurl: optional, the api url of obs service
+                 if not specified, the one from oscrc will be used
+          oscrc: optional, the path of customized oscrc
+                 if not specified, ~/.oscrc will be used
+        """
+
+        if oscrc:
+            self._oscrc = oscrc
+        else:
+            self._oscrc = os.path.expanduser('~/.oscrc')
+
+        #self._bs = msger.PrintBufWrapper(buildservice.BuildService, #class
+        #                                 msger.verbose, # func_for_stdout
+        #                                 msger.warning, # func_for_stderr
+        #                                 apiurl, oscrc) # original args
+        self._bs = buildservice.BuildService(apiurl, oscrc)
+        self._apiurl = self._bs.apiurl
+
+        self._bdir = os.path.abspath(os.path.expanduser(basedir))
+        self._prj = prj
+        self._pkg = pkg
+        self._dst_prj = dst_prj;
+        if dst_pkg:
+            self._dst_pkg = dst_pkg;
+        else:
+            self._dst_pkg = pkg;
+        self._pkgpath = os.path.join(self._bdir, prj, pkg)
+
+        if not os.path.exists(self._bdir):
+            os.makedirs(self._bdir)
+
+        with Workdir(self._bdir):
+            shutil.rmtree(prj, ignore_errors = True)
+
+        if self._bs.isNewPackage(prj, pkg):
+            # to init new package in local dir
+            self._mkpac()
+        else:
+            # to checkout server stuff
+            self._checkout_latest()
+
+    def _mkpac(self):
+        with Workdir(self._bdir):
+            self._bs.mkPac(self._prj, self._pkg)
+
+    @msger.waiting
+    def _checkout_latest(self):
+        """ checkout the 'latest' revision of package with link expanded
+        """
+
+        with Workdir(self._bdir):
+            try:
+                self._bs.checkout(self._prj, self._pkg)
+            except buildservice.ObsError, err:
+                raise errors.ObsError(str(err))
+
+    def is_new_pkg(self):
+        return self._bs.isNewPackage(self._prj, self._pkg)
+
+    def get_workdir(self):
+        return self._pkgpath
+
+    def remove_all(self):
+        """Remove all files under pkg dir
+        """
+
+        with Workdir(self._pkgpath):
+            runner.quiet('/bin/rm -f *')
+
+    def update_local(self):
+        """Do the similar work of 'osc addremove',
+          remove all deleted files and added all new files
+        """
+
+        with Workdir(self._pkgpath):
+            pac = self._bs.findPac()
+            # FIXME, if pac.to_be_added are needed to be considered.
+            pac.todo = list(set(pac.filenamelist + pac.filenamelist_unvers))
+            for filename in pac.todo:
+                if os.path.isdir(filename):
+                    continue
+                # ignore foo.rXX, foo.mine for files which are in 'C' state
+                if os.path.splitext(filename)[0] in pac.in_conflict:
+                    continue
+                state = pac.status(filename)
+                if state == '?':
+                    pac.addfile(filename)
+                elif state == '!':
+                    pac.delete_file(filename)
+
+    def add_file(self, fpath):
+        # copy the file in
+        runner.quiet('/bin/cp -f %s %s' % (fpath, self._pkgpath))
+
+        # add it into local pac
+        with Workdir(self._pkgpath):
+            pac = self._bs.findPac()
+            if pac:
+                pac.addfile(os.path.basename(fpath))
+            else:
+                msger.warning('Invalid pac working dir, skip')
+
+    @msger.waiting
+    def commit(self, msg):
+        with Workdir(self._pkgpath):
+            self._bs.submit(msg)
+
+    def submit_req(self, msg=''):
+        with Workdir(self._pkgpath):
+            reqs = self._bs.getRequestList(self._dst_prj, self._dst_pkg)
+            print reqs
+            for req in reqs:
+                msg += '\n\n%s' % req.description
+            print msg
+            newreq = self._bs.submitReq(self._prj, self._pkg, self._dst_prj, self._dst_pkg, msg, src_update='cleanup')
+            print newreq
+            for req in reqs:
+                self._bs.reqSupersede(req.reqid, 'superseded by %s' % newreq, newreq)
+
class ObsProject(object):
    """ Wrapper class of project in OBS
    """

    def __init__(self, prj, apiurl=None, oscrc=None):
        """Arguments:
          prj: name of obs project
          apiurl: optional, the api url of obs service
                 if not specified, the one from oscrc will be used
          oscrc: optional, the path of customized oscrc
                 if not specified, ~/.oscrc will be used
        """

        if oscrc:
            self._oscrc = oscrc
        else:
            self._oscrc = os.path.expanduser('~/.oscrc')

        self._bs = buildservice.BuildService(apiurl, oscrc)
        self._apiurl = self._bs.apiurl
        self._prj = prj

    def is_new(self):
        """True if the project does not exist on the obs server yet."""
        return self._bs.isNewProject(self._prj)

    def create(self):
        """Create an empty project"""
        # TODO
        pass

    def branch_from(self, src_prj):
        """Create a new branch project of `src_prj`
        """

        if self._bs.isNewProject(src_prj):
            # fix: error message grammar ('do not exists')
            raise errors.ObsError('project: %s does not exist' % src_prj)

        if not self.is_new():
            msger.warning('branched project: %s exists' % self._prj)
            return

        # pick the 1st valid package inside src prj FIXME
        pkglist = self._bs.getPackageList(src_prj)
        if not pkglist:
            raise errors.ObsError('base project %s is empty.' % src_prj)
        dumb_pkg = pkglist[0]

        # branch out the new one
        target_prj, target_pkg = self._bs.branchPkg(src_prj, dumb_pkg,
                                                    target_project = self._prj,
                                                    target_package = 'dumb_pkg')

        if target_prj != self._prj:
            raise errors.ObsError('branched prj: %s is not the expected %s' \
                           % (target_prj, self._prj))

        # remove the dumb pkg
        self._bs.deletePackage(target_prj, target_pkg)

    def list(self):
        """Get all packages name in this project"""
        return self._bs.getPackageList(self._prj)
diff --git a/policycheck.py b/policycheck.py
new file mode 100755 (executable)
index 0000000..27a7385
--- /dev/null
@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+
+"""This script will do policy check when patch is created in gerrit.
+"""
+
+import os
+import re
+import tempfile
+import glob
+import shutil
+
+import runner
+import utils
+import git
+import obspkg
+from envparas import *
+
+import gbp.rpm
+
+mappingprj = 'scm/git-obs-mapping'
+gerritcmd = 'ssh -p %s %s@%s gerrit' % (GERRIT_SSHPORT, GERRIT_USERNAME, GERRIT_HOSTNAME)
+giturl = 'ssh://%s@%s:%s' % (GERRIT_USERNAME, GERRIT_HOSTNAME, GERRIT_SSHPORT)
+
def end(rc=0):
    """Remove the temporary checkout dir, then exit with status `rc`."""
    shutil.rmtree(tmpdir)
    exit(rc)
+
def update_git_project(workdir, prj):
    """Refresh the local clone of gerrit project `prj` under `workdir`.

    Pull if a clone already exists; on pull failure (or no clone yet) do
    a fresh `git clone`.  Returns True on success.  On failure the local
    dir is removed so the next attempt starts clean.
    """
    prjdir = os.path.join(workdir, prj)

    with utils.Workdir(workdir):
        if os.path.isdir(prjdir):
            gitprj = git.Git(prjdir)
            if gitprj.pull():
                result = True
            else:
                # stale/broken clone: wipe it and re-clone
                shutil.rmtree(prjdir)
                result = runner.runtool('git clone %s/%s %s'
                                        % (giturl, prj, prj))[0] == 0
        else:
            result = runner.runtool('git clone %s/%s %s'
                                    % (giturl, prj, prj))[0] == 0

    if not result:
        # fix: a failed clone may never have created prjdir, so the
        # original unconditional rmtree could raise OSError here
        shutil.rmtree(prjdir, ignore_errors=True)
    return result
+
def check_obs_newpkg(obsprj, pkg):
    """Return True when `pkg` is not yet present in OBS project `obsprj`.

    fix: collapsed the verbose if/else-return-bool into the boolean
    expression itself.
    """
    return pkg not in obsprj.list()
+
+def check_spec(spec):
+    """Run rpmlint on `spec` and return its report with the checkout
+    path stripped.  A missing spec file is recorded in the module-level
+    `missedspec` list and yields an empty report.
+    (Reads the module-level `prjdir` set in __main__.)
+    """
+    if os.path.exists(spec):
+        #return runner.outs('/var/lib/jenkins/jenkins-scripts/rpmlint_wapper.py %s' % spec)
+        return utils.rpmlint(spec).replace(prjdir+'/', '')
+    else:
+        missedspec.append(os.path.basename(spec))
+        return ''
+
+if __name__ == '__main__':
+
+    # current workspace dir
+    workspace = os.getcwd()
+    # Jenkins home dir
+    homedir = os.path.abspath(os.path.join(workspace, '../../..'))
+    tmpdir = tempfile.mkdtemp(prefix=workspace+'/')
+    prjdir = os.path.join(tmpdir, GERRIT_PROJECT)
+    prjpath, prj = os.path.split(GERRIT_PROJECT)
+
+    if not os.path.isfile('%s/git/%s/git-obs-mapping.xml' % (homedir, mappingprj)):
+        if not utils.retry(update_git_project, (os.path.join(homedir, 'git'), mappingprj)):
+            end(1)
+
+    if utils.retry(update_git_project, (os.path.join(homedir, 'git'), GERRIT_PROJECT)):
+        shutil.copytree(os.path.join(homedir, 'git', GERRIT_PROJECT), prjdir, True)
+    if not utils.retry(update_git_project, (tmpdir, GERRIT_PROJECT)):
+        end(1)
+
+    mygit = git.Git(prjdir)
+    mygit.fetch('%s/%s' % (giturl, GERRIT_PROJECT), GERRIT_REFSPEC, '-t')
+    mygit.checkout('FETCH_HEAD')
+
+    packagingdir = utils.parse_link('%s/%s' % (prjdir, 'packaging'))
+    needsr = False
+    filelist = mygit.show('--pretty="format:"', '--name-only', GERRIT_PATCHSET_REVISION)
+    p = re.compile('^%s/.*\.changes$' % packagingdir)
+    for filename in filelist:
+        if p.match(filename):
+            tag = mygit.describe('--tags --exact-match', GERRIT_PATCHSET_REVISION)
+            if tag:
+                needsr = True
+                break
+
+    mapping = utils.parse_mapping('%s/git/%s/git-obs-mapping.xml' % (homedir, mappingprj), GERRIT_PROJECT, GERRIT_BRANCH)
+    obstarget = []
+    newpkg = False
+    missedspec = []
+    checkspecmsg = ''
+    checkobsmsg = ''
+
+    speclist = []
+
+    for target in mapping:
+        (obs_dest_prj, obs_stg_prj, obs_pkg) = target
+        if not obs_dest_prj:
+            continue
+        if obs_pkg:
+            specfile = '%s/%s/%s.spec' % (prjdir, packagingdir, obs_pkg)
+        else:
+            specfile = utils.guess_spec('%s/%s' % (prjdir, packagingdir))
+            if not specfile:
+                specfile = '%s/%s/%s.spec' % (prjdir, packagingdir, prj)
+
+        if not obs_stg_prj or needsr:
+            obstarget.append(obs_dest_prj)
+            if os.path.exists(specfile):
+                try:
+                    spec = gbp.rpm.parse_spec(specfile)
+                except exception,exc:
+                    mesger.error('%s' % exc)
+                    end(1)
+                obsprj = obspkg.ObsProject(obs_dest_prj, apiurl = OBS_API_URL, oscrc = OBS_OSCRC_PATH)
+                if check_obs_newpkg(obsprj, spec.name):
+                    newpkg = True
+
+        if specfile not in speclist:
+            speclist.append(specfile)
+            if os.path.exists(os.path.join(prjdir, packagingdir)):
+                msg = check_spec(specfile)
+            else:
+                msg = 'Error: Can not find packaging directory!'
+            if checkspecmsg:
+                checkspecmsg += '\n\n'
+            checkspecmsg += msg
+
+    if not mapping:
+        checkobsmsg = '[IMPORTANT NOTICE]: The change for %s branch will not be submitted to OBS according configuration in gerrit scm/git-obs-mapping project. If needed, please modify scm/git-obs-mapping to enable submission to OBS.' % GERRIT_BRANCH
+    elif not obstarget:
+        checkobsmsg = '[IMPORTANT NOTICE]: This change will not be submitted to OBS. If want to trigger submission to OBS, please make sure this change meets all below criteria.\n- The commit includes changes to the change log file under packaging directory.\n- A tag is created on this commit, and pushed together with the commit to Gerrit at the same time.'
+    else:
+        checkobsmsg = '[IMPORTANT NOTICE]: '
+        if newpkg:
+            checkobsmsg += '[New Package] '
+        checkobsmsg += 'This change will be submitted to OBS %s project!!!' % ' '.join(obstarget)
+    if missedspec:
+        checkspecmsg += '\n\nError: Can not find %s under packaging directory!' % ' '.join(missedspec)
+
+    runner.quiet('%s %s %s,%s --message \'"%s"\'' % (gerritcmd, 'review', GERRIT_CHANGE_NUMBER, GERRIT_PATCHSET_NUMBER, checkobsmsg))
+    runner.quiet('%s %s %s,%s --message \'"%s"\'' % (gerritcmd, 'review', GERRIT_CHANGE_NUMBER, GERRIT_PATCHSET_NUMBER, checkspecmsg))
+    end()
diff --git a/rpmlint_wapper.py b/rpmlint_wapper.py
new file mode 100755 (executable)
index 0000000..d09c847
--- /dev/null
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+import sys, os
+import tempfile, locale
+
+sys.path.insert(1, '/usr/share/rpmlint')
+
+from Filter import setRawOut, printed_messages, badnessThreshold, _badness_score
+import SpecCheck
+import Config
+import Pkg
+import Filter
+
+def __print(s):
+    # swallow rpmlint's default console output; messages are collected
+    # in `outputs` by the _print() replacement below instead
+    pass
+
def _print(msgtype, pkg, reason, details):
    """Replacement for rpmlint's Filter._print that records messages in
    the module-level `outputs` list instead of printing them.

    Returns True when the message was recorded, False when filtered.
    """
    global _badness_score

    threshold = badnessThreshold()

    badness = 0
    if threshold >= 0:
        badness = Config.badness(reason)
        # anything with badness is an error
        if badness:
            msgtype = 'E'
        # errors without badness become warnings
        elif msgtype == 'E':
            msgtype = 'W'

    ln = ""
    if pkg.current_linenum is not None:
        ln = "%s:" % pkg.current_linenum
    arch = ""
    if pkg.arch is not None:
        arch = ".%s" % pkg.arch
    s = "%s%s:%s %s: %s" % (pkg.name, arch, ln, msgtype, reason)
    if badness:
        s = s + " (Badness: %d)" % badness
    for d in details:
        s = s + " %s" % d

    # fix: the original attached this block as the `else` of the
    # for-loop above; since that loop has no `break` the else always
    # ran, so a plain statement sequence is equivalent and far less
    # misleading.
    if not Config.isFiltered(s):
        outputs.append(s)
        printed_messages[msgtype] += 1
        _badness_score += badness
        if threshold >= 0:
            # NOTE(review): `_diagnostic` is not defined anywhere in
            # this script -- this branch would raise NameError.  It was
            # copied from rpmlint's Filter module; TODO confirm/import.
            _diagnostic.append(s + "\n")
        else:
            __print(s)
            if Config.info:
                # NOTE(review): printDescriptions is likewise undefined
                # here -- copied from rpmlint's Filter; TODO confirm.
                printDescriptions(reason)
        return True

    return False
+
+
+Filter._print=_print
+spec_file=sys.argv[1]
+
+# the tempfile is designed for python policycheck.py, bash script doesn't use it
+
+try:
+    execfile(os.path.expanduser('~/.config/rpmlint'))
+except IOError:
+    pass
+
+
+outputs = []
+
+pkg = Pkg.FakePkg(spec_file)
+check = SpecCheck.SpecCheck()
+check.check_spec(pkg, spec_file)
+pkg.cleanup()
+
+print "rpmlint checked %s: %d errors, %s warnings." % (spec_file, printed_messages["E"], printed_messages["W"])
+for line in outputs:
+    line = line.strip().lstrip(spec_file+':').strip()
+    if not line.startswith('W:') and not line.startswith('E:'):
+        line = 'line '+line
+    print '- '+line
diff --git a/runner.py b/runner.py
new file mode 100644 (file)
index 0000000..fd8082e
--- /dev/null
+++ b/runner.py
@@ -0,0 +1,130 @@
+#!/usr/bin/python -tt
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2011 Intel, Inc.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 59
+# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+import os
+import subprocess
+
+import msger
+
def runtool(cmdln_or_args, catch=1):
    """ wrapper for most of the subprocess calls
    input:
        cmdln_or_args: can be both args list and cmdln str (shell=True)
        catch: 0, quietly run, output discarded
               1, only STDOUT
               2, only STDERR
               3, both STDOUT and STDERR (merged)
    return:
        (rc, output)
        if catch==0 the output is always '' (fix: the old docstring
        wrongly said None)
    """

    if catch not in (0, 1, 2, 3):
        # invalid catch selection, will cause exception, that's good
        return None

    if isinstance(cmdln_or_args, list):
        cmd = cmdln_or_args[0]
        shell = False
    else:
        import shlex
        cmd = shlex.split(cmdln_or_args)[0]
        shell = True

    # /dev/null is only needed when some stream gets discarded
    if catch != 3:
        dev_null = os.open("/dev/null", os.O_WRONLY)

    if catch == 0:
        sout = dev_null
        serr = dev_null
    elif catch == 1:
        sout = subprocess.PIPE
        serr = dev_null
    elif catch == 2:
        sout = dev_null
        serr = subprocess.PIPE
    else: # catch == 3
        sout = subprocess.PIPE
        serr = subprocess.STDOUT

    try:
        process = subprocess.Popen(cmdln_or_args, stdout=sout,
                             stderr=serr, shell=shell)
        out = process.communicate()[0]
        if out is None:
            out = ''
    # fix: modernized 'except OSError, exc' to the 2.6+/3.x 'as' form
    except OSError as exc:
        if exc.errno == 2:
            # [Errno 2] No such file or directory
            msger.error('Cannot run command: %s, lost dependency?' % cmd)
        else:
            raise # relay
    finally:
        if catch != 3:
            os.close(dev_null)

    return (process.returncode, out)
+
def show(cmdln_or_args):
    """Run a command, report its full (merged) output via msger.verbose
    and return the exit code.
    """
    rcode, out = runtool(cmdln_or_args, catch=3)

    if isinstance(cmdln_or_args, list):
        cmd = ' '.join(cmdln_or_args)
    else:
        cmd = cmdln_or_args

    msg = 'running command: "%s"' % cmd
    out = out.strip() if out else out
    if out:
        framed = ['  +----------------']
        framed += ['  | %s' % line for line in out.splitlines()]
        framed += ['  +----------------']
        msg += ', with output::\n' + '\n'.join(framed)

    msger.verbose(msg)
    return rcode
+
def outs(cmdln_or_args, catch=1):
    """Run a command and return its captured output, stripped
    (catch must be 1, 2 or 3 for any output to exist).
    """
    _, output = runtool(cmdln_or_args, catch)
    return output.strip()
+
def quiet(cmdln_or_args):
    """Run a command discarding all of its output; return the exit code."""
    rcode, _ = runtool(cmdln_or_args, catch=0)
    return rcode
+
def embed(cmdln_or_args):
    """Run a command with stdin/stdout/stderr inherited from this
    process ("embed" the tool in our terminal); return its exit status.
    """
    if isinstance(cmdln_or_args, list):
        args = cmdln_or_args
    else:
        import shlex
        args = shlex.split(cmdln_or_args)

    try:
        sts = subprocess.call(args)
    # fix: modernized 'except OSError, exc' to the 2.6+/3.x 'as' form
    except OSError as exc:
        if exc.errno == 2:
            # [Errno 2] No such file or directory
            msger.error('Cannot run command: %s, lost dependency?' % args[0])
        else:
            raise # relay

    return sts
diff --git a/submitobs.py b/submitobs.py
new file mode 100755 (executable)
index 0000000..1ef020e
--- /dev/null
@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+# vim: ai ts=4 sts=4 et sw=4
+
+"""This script will sync a merged change in gerrit to OBS corresponding project.
+"""
+
import os
import tempfile
import glob
import shutil
import re
from time import sleep

import gbp.rpm
from gbp.errors import GbpError

import runner
import utils
import git
import obspkg
from envparas import *
import errors
import msger
import mysql
+
# Gerrit projects that carry infrastructure configuration rather than code.
mappingprj = 'scm/git-obs-mapping'
rpmlintprj = 'scm/rpmlint-config'
# Base ssh clone URL for the Gerrit server; credentials/host come from the
# environment via `from envparas import *`.
giturl = 'ssh://%s@%s:%s' % (GERRIT_USERNAME, GERRIT_HOSTNAME, GERRIT_SSHPORT)
+
def end(rc='success'):
    """Record the trigger result in the database, clean up and exit.

    rc is one of 'success', 'failure' or 'retry'; the matching state is
    written to the ChangeMerged_Event row for this change/patchset and
    the process exits (status 1 for 'retry' so Jenkins re-triggers,
    0 otherwise).  Raises ValueError for an unknown rc — the original
    silently fell through and kept running with tmpdir already removed.
    """
    states = {
        'success': ('TRIGGER_SUCCESS', 0),
        'failure': ('TRIGGER_FAILURE', 0),
        'retry':   ('TRIGGER_RETRY', 1),
    }
    db = mysql.Database(MYSQL_HOSTNAME, MYSQL_USERNAME, MYSQL_PASSWORD, MYSQL_DB_NAME)
    shutil.rmtree(tmpdir)
    if rc not in states:
        raise ValueError('unknown result code: %s' % rc)
    state, exit_code = states[rc]
    db.update('ChangeMerged_Event', {'state': state},
              {'changeNum': GERRIT_CHANGE_NUMBER,
               'patchsetNum': GERRIT_PATCHSET_NUMBER})
    exit(exit_code)
+
def update_git_project(workdir, prj):
    """Clone or fast-forward the git project *prj* under *workdir*.

    An existing checkout is pulled; if the pull fails (diverged or
    corrupt checkout) it is removed and re-cloned.  Returns True on
    success.  On failure any stale checkout is removed so the next
    attempt starts from a clean slate.
    """
    prjdir = os.path.join(workdir, prj)
    clone_cmd = 'git clone %s/%s %s' % (giturl, prj, prj)
    result = True

    with utils.Workdir(workdir):
        if os.path.isdir(prjdir):
            gitprj = git.Git(prjdir)
            if not gitprj.pull():
                shutil.rmtree(prjdir)
                result = runner.runtool(clone_cmd)[0] == 0
        else:
            result = runner.runtool(clone_cmd)[0] == 0

    # BUG fixed: the original unconditionally rmtree'd here, raising
    # OSError when a failed clone never created prjdir in the first place.
    if not result and os.path.isdir(prjdir):
        shutil.rmtree(prjdir)
    return result
+
if __name__ == '__main__':

    # current workspace dir
    workspace = os.getcwd()
    # Jenkins home dir (three levels up from the job workspace)
    homedir = os.path.abspath(os.path.join(workspace, '../../..'))
    tmpdir = tempfile.mkdtemp(prefix=workspace + '/')
    prjdir = os.path.join(tmpdir, GERRIT_PROJECT)
    prjpath, prj = os.path.split(GERRIT_PROJECT)

    # Make sure the git-obs-mapping configuration is available locally,
    # refreshing it when the merged change *is* the mapping project itself.
    if GERRIT_PROJECT == mappingprj or not os.path.isfile('%s/git/%s/git-obs-mapping.xml' % (homedir, mappingprj)):
        if not utils.retry(update_git_project, (os.path.join(homedir, 'git'), mappingprj)):
            end('retry')
        if GERRIT_PROJECT == mappingprj:
            end('success')

    # Seed the temporary working copy from the cached clone when possible,
    # then bring the temporary copy up to date.
    if utils.retry(update_git_project, (os.path.join(homedir, 'git'), GERRIT_PROJECT)):
        shutil.copytree(os.path.join(homedir, 'git', GERRIT_PROJECT), prjdir, True)
    if not utils.retry(update_git_project, (tmpdir, GERRIT_PROJECT)):
        # BUG fixed: was end(1), which matched no branch in end() and let
        # the script keep running with tmpdir already deleted.
        end('retry')

    # rpmlint configuration project: install the config and stop.
    if GERRIT_PROJECT == rpmlintprj:
        if not os.path.exists('%s/.config' % homedir):
            os.makedirs('%s/.config' % homedir)
        shutil.copy2('%s/rpmlint' % prjdir, '%s/.config/rpmlint' % homedir)
        end('success')

    mygit = git.Git(prjdir)
    mygit.checkout('origin/%s' % GERRIT_BRANCH)

    # A submit request is needed only when a *tagged* commit touches a
    # .changes file under the packaging directory.
    packagingdir = utils.parse_link('%s/%s' % (prjdir, 'packaging'))
    needsr = False
    filelist = mygit.show('--pretty="format:"', '--name-only', GERRIT_PATCHSET_REVISION)
    changes_re = re.compile(r'^%s/.*\.changes$' % packagingdir)
    for filename in filelist:
        if changes_re.match(filename):
            tag = mygit.describe('--tags --exact-match', GERRIT_PATCHSET_REVISION)
            if tag:
                needsr = True
                break

    commitinfo = mygit.get_commit_info(GERRIT_PATCHSET_REVISION)
    msg = 'Commit: %s %s\nOwner: %s <%s>\nGerrit URL: %s' % (commitinfo['id'], commitinfo['subject'], GERRIT_PATCHSET_UPLOADER_NAME, GERRIT_PATCHSET_UPLOADER_EMAIL, GERRIT_CHANGE_URL)
    mapping = utils.parse_mapping('%s/git/%s/git-obs-mapping.xml' % (homedir, mappingprj), GERRIT_PROJECT, GERRIT_BRANCH)

    for target in mapping:
        mygit.clean('-fd')
        (obs_dst_prj, obs_stg_prj, obs_pkg) = target
        if not obs_dst_prj:
            continue

        if obs_stg_prj:
            # a staging project only matters when a submit request is due
            if not needsr:
                continue
        else:
            obs_stg_prj = obs_dst_prj

        # (the original re-checked `if not obs_dst_prj` here; redundant)
        if obs_pkg:
            specfile = '%s/%s/%s.spec' % (prjdir, packagingdir, obs_pkg)
        else:
            specfile = utils.guess_spec('%s/%s' % (prjdir, packagingdir))
            if not specfile:
                specfile = '%s/%s/%s.spec' % (prjdir, packagingdir, prj)

        if not os.path.exists(specfile):
            continue
        try:
            spec = gbp.rpm.parse_spec(specfile)
        except GbpError as exc:
            # BUG fixed: GbpError and msger were referenced here without
            # being imported (NameError on this path).
            msger.error('parse spec failed. %s' % exc)
            end('failure')

        # export the packaging tree (tarball + spec) with gbs
        outdir = tempfile.mkdtemp(prefix=tmpdir + '/')
        with utils.Workdir(prjdir):
            runner.quiet('gbs export --spec=%s -o %s' % (specfile, outdir))
        tarballdir = os.path.join(outdir, os.listdir(outdir)[0])

        if obs_stg_prj != obs_dst_prj:
            # keep an empty placeholder package so OBS does not delete the
            # staging project once all requests are accepted
            tmppkg = obspkg.ObsPackage(tmpdir, obs_stg_prj, 'tmp',
                                       apiurl=OBS_API_URL, oscrc=OBS_OSCRC_PATH)
            if tmppkg.is_new_pkg():
                tmppkg.commit("Leave an empty package in this project to prevent OBS delete it automatically when all request from here are accepted.")

        localpkg = obspkg.ObsPackage(tmpdir, obs_stg_prj, spec.name,
                                     dst_prj=obs_dst_prj, apiurl=OBS_API_URL,
                                     oscrc=OBS_OSCRC_PATH)
        oscworkdir = localpkg.get_workdir()
        localpkg.remove_all()
        for myfile in os.listdir(tarballdir):
            shutil.copy2(os.path.join(tarballdir, myfile), os.path.join(oscworkdir, myfile))
        localpkg.update_local()

        # OBS calls are flaky; retry the commit/submit a few times
        retry_count = 3
        while retry_count > 0:
            try:
                localpkg.commit(msg)
                print('needsr %s' % needsr)
                if needsr:
                    localpkg.submit_req(msg)
                break
            except errors.ObsError as exc:
                msger.warning('obs operation failed, retrying...')
                sleep(1)  # BUG fixed: sleep was used without being imported
                retry_count -= 1

        if not retry_count:
            end('retry')

    end('success')
diff --git a/utils.py b/utils.py
new file mode 100644 (file)
index 0000000..2391dec
--- /dev/null
+++ b/utils.py
@@ -0,0 +1,188 @@
+#!/usr/bin/python -tt
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2011 Intel, Inc.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 59
+# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+import os
+import glob
+import shutil
+from lxml import etree
+import sys
+import tempfile, locale
+
+sys.path.insert(1, '/usr/share/rpmlint')
+
+from Filter import setRawOut, printed_messages, badnessThreshold, _badness_score
+import SpecCheck
+import Config
+import Pkg
+import Filter
+    
+import errors
+import msger
+import runner
+
class Workdir(object):
    """Context manager that chdirs into *path* for the duration of the
    `with` block, creating the directory if needed, and restores the
    previous working directory on exit."""

    def __init__(self, path):
        self._target = path
        self._saved = os.getcwd()

    def __enter__(self):
        if not os.path.exists(self._target):
            os.makedirs(self._target)
        os.chdir(self._target)

    def __exit__(self, _type, _value, _tb):
        # always return to where we came from, even on exception
        os.chdir(self._saved)
+
def retry(func, opts, attempts=3):
    """Call func(*opts) up to *attempts* times (default 3) until it
    returns a truthy value.

    Returns True on the first truthy result, False when every attempt
    failed.  (The original used the long-deprecated apply() builtin,
    removed in Python 3.)
    """
    for _ in range(attempts):
        if func(*opts):
            return True
    return False
+
# XPath templates for git-obs-mapping.xml lookups.  The literal tokens
# GERRIT_PROJECT / GERRIT_BRANCH / PATH_NAME are substituted via
# str.replace() before evaluation; each template also matches the variant
# with a leading "/" in the name attribute.
# Matches a <project>/<branch> mapping entry.
prj_xpath = '/mapping/project[@name="GERRIT_PROJECT"]/branch[@name="GERRIT_BRANCH"] | /mapping/project[@name="/GERRIT_PROJECT"]/branch[@name="GERRIT_BRANCH"]'
# Matches project or branch entries explicitly marked submission="N" (blocked).
prj_block_xpath = '/mapping/project[@name="GERRIT_PROJECT" and @submission="N"] | /mapping/project[@name="/GERRIT_PROJECT" and @submission="N"] | /mapping/project[@name="GERRIT_PROJECT"]/branch[@name="GERRIT_BRANCH" and @submission="N"] | /mapping/project[@name="/GERRIT_PROJECT"]/branch[@name="GERRIT_BRANCH" and @submission="N"]'
# Fallback <default>/<path> matching by path prefix (with/without trailing "/").
path_xpath = '/mapping/default/path[@name="PATH_NAME"]/branch[@name="GERRIT_BRANCH"] | /mapping/default/path[@name="PATH_NAME/"]/branch[@name="GERRIT_BRANCH"]'
# Blocked (submission="N") variant of the path fallback.
path_block_xpath = '/mapping/default/path[@name="PATH_NAME" and @submission="N"] | /mapping/default/path[@name="PATH_NAME/" and @submission="N"] | /mapping/default/path[@name="PATH_NAME"]/branch[@name="GERRIT_BRANCH" and @submission="N"] | /mapping/default/path[@name="PATH_NAME/"]/branch[@name="GERRIT_BRANCH" and @submission="N"]'
def parse_mapping(mapping_file, project, branch):
    """Parse the git-obs-mapping xml file to get the target OBS projects.

    Returns a list of (OBS_project, OBS_staging_project, OBS_package)
    tuples for *project*/*branch*, or [] when submission is blocked or
    no mapping entry matches.  Raises errors.XmlError if *mapping_file*
    is not a regular file.
    """
    def parse_node(node):
        return (node.get("OBS_project"),
                node.get("OBS_staging_project"),
                node.get("OBS_package"))

    if not os.path.isfile(mapping_file):
        # BUG fixed: the original formatted the builtin `file` into the
        # message instead of the mapping_file argument.
        raise errors.XmlError('%s isn\'t a regular file.' % mapping_file)
    root = etree.parse(mapping_file).getroot()

    # exact project-level match takes precedence
    if root.xpath(prj_block_xpath.replace('GERRIT_PROJECT', project).replace('GERRIT_BRANCH', branch)):
        return []
    nodelist = root.xpath(prj_xpath.replace('GERRIT_PROJECT', project).replace('GERRIT_BRANCH', branch))
    if nodelist:
        return [parse_node(node) for node in nodelist]

    # otherwise walk up the project's path, longest prefix first
    pathname = '/' + project
    while True:
        pathname = os.path.split(pathname)[0]
        if root.xpath(path_block_xpath.replace('PATH_NAME', pathname).replace('GERRIT_BRANCH', branch)):
            return []
        nodelist = root.xpath(path_xpath.replace('PATH_NAME', pathname).replace('GERRIT_BRANCH', branch))
        if nodelist:
            return [parse_node(node) for node in nodelist]
        if pathname == '/':
            return []
def guess_spec(workdir):
    """Return the single *.spec file found directly under *workdir*.

    Returns None (after a diagnostic print) when the choice is
    ambiguous, and None silently when there is no spec file at all.
    """
    candidates = glob.glob('%s/*.spec' % workdir)
    if len(candidates) == 1:
        return candidates[0]
    if candidates:
        print("Can't decide which spec file to use.")
    return None
+
def parse_link(path):
    """Return the symlink target of *path*, or its basename if it is
    not a symlink.

    Uses os.readlink directly instead of shelling out to readlink(1),
    which the original did via an unquoted format string that broke on
    paths containing whitespace.
    """
    if os.path.islink(path):
        return os.readlink(path)
    return os.path.basename(path)
+
def rpmlint(spec_file):
    """Run rpmlint's SpecCheck on *spec_file* and return a report string.

    Installs a custom message handler into the rpmlint Filter module so
    messages are collected into a local list instead of printed, then
    formats them as a short human-readable summary.
    """

    def __print(s):
        # swallow rpmlint's direct printing
        pass

    def _print(msgtype, pkg, reason, details):
        # Replacement for Filter._print: record the message instead of
        # printing it.  Mirrors the upstream rpmlint implementation.
        # NOTE(review): `global` rebinds this module's imported copy of
        # _badness_score, not Filter's own counter -- confirm intent.
        global _badness_score

        threshold = badnessThreshold()

        badness = 0
        if threshold >= 0:
            badness = Config.badness(reason)
            # anything with badness is an error
            if badness:
                msgtype = 'E'
            # errors without badness become warnings
            elif msgtype == 'E':
                msgtype = 'W'

        ln = ""
        if pkg.current_linenum is not None:
            ln = "%s:" % pkg.current_linenum
        arch = ""
        if pkg.arch is not None:
            arch = ".%s" % pkg.arch
        s = "%s%s:%s %s: %s" % (pkg.name, arch, ln, msgtype, reason)
        if badness:
            s = s + " (Badness: %d)" % badness
        for d in details:
            s = s + " %s" % d
        # The original used `for ... else` here; with no break in the
        # loop the else clause always ran, so this is plain sequential
        # code.
        if not Config.isFiltered(s):
            outputs.append(s)
            printed_messages[msgtype] += 1
            _badness_score += badness
            if threshold >= 0:
                # NOTE(review): _diagnostic is not defined in this
                # module; this branch would raise NameError -- TODO
                # confirm against the upstream Filter module.
                _diagnostic.append(s + "\n")
            else:
                __print(s)
                if Config.info:
                    # NOTE(review): printDescriptions is also undefined
                    # in this module -- TODO confirm.
                    printDescriptions(reason)
            return True

        return False

    Filter._print = _print

    # load the user's rpmlint configuration if present
    try:
        execfile(os.path.expanduser('~/.config/rpmlint'))
    except IOError:
        pass

    outputs = []

    pkg = Pkg.FakePkg(spec_file)
    check = SpecCheck.SpecCheck()
    check.check_spec(pkg, spec_file)
    pkg.cleanup()

    msg = 'rpmlint checked %s: %d errors, %d warnings.' % (
        spec_file, printed_messages["E"], printed_messages["W"])
    prefix = spec_file + ':'
    for line in outputs:
        line = line.strip()
        # BUG fixed: the original used lstrip(spec_file+':'), which
        # strips a *character set* rather than a prefix and could eat
        # the start of the message itself.
        if line.startswith(prefix):
            line = line[len(prefix):].strip()
        if not line.startswith('W:') and not line.startswith('E:'):
            line = 'line ' + line
        msg += '\n- ' + line
    return msg