passx=%(passwdx)s
"""
+OSCRC_MULTI_TEMPLATE = """[general]
+apiurl = %(apiurl)s
+plaintext_passwd=0
+use_keyring=0
+http_debug = %(http_debug)s
+debug = %(debug)s
+gnome_keyring=0
+[%(apiurl)s]
+sslcertck=0
+user=%(user)s
+passx=%(passwdx)s
+[%(remote_apiurl)s]
+sslcertck=0
+user=%(remote_user)s
+passx=%(remote_passwdx)s
+
+"""
+
+
class BuildService(OSC):
"""Interface to Build Service API"""
self.oscrcpath = tmpf.path
OSC.__init__(self, apiurl, self.oscrcpath)
+    def __init__(self, apiurl, apiuser, apipasswd, \
+                 remote_apiurl=None, remote_apiuser=None, remote_apipasswd=None):
+        """Render a throw-away .oscrc and initialise the OSC base class.
+
+        apiurl/apiuser/apipasswd: primary OBS API endpoint and credentials.
+        remote_apiurl/remote_apiuser/remote_apipasswd: optional second OBS
+        endpoint; when any of them is given, OSCRC_MULTI_TEMPLATE is used so
+        the same config file carries credentials for both instances.
+        """
+        if not remote_apiurl and not remote_apiuser and not remote_apipasswd:
+            # Single OBS instance: classic one-section oscrc.
+            oscrc = OSCRC_TEMPLATE % {
+                "http_debug": 0,
+                "debug": 0,
+                "apiurl": apiurl,
+                "user": apiuser,
+                "passwdx": encode_passwd(apipasswd)}
+        else:
+            # Two OBS instances: one [url] section per apiurl.
+            oscrc = OSCRC_MULTI_TEMPLATE % {
+                "http_debug": 0,
+                "debug": 0,
+                "apiurl": apiurl,
+                "user": apiuser,
+                "passwdx": encode_passwd(apipasswd),
+                "remote_apiurl": remote_apiurl,
+                "remote_user": remote_apiuser,
+                "remote_passwdx": encode_passwd(remote_apipasswd)}
+        self.apiurl = apiurl
+        self.remote_apiurl = remote_apiurl
+        # NOTE(review): tmpf is a local; presumably Temp keeps the file on
+        # disk after this scope ends — confirm Temp's lifetime semantics.
+        tmpf = Temp(prefix='.oscrc', content=oscrc)
+        self.oscrcpath = tmpf.path
+        OSC.__init__(self, apiurl, self.oscrcpath)
+
def get_src_file_list(self, project, package, revision=None):
""" get source file list of prj/pac
"""
query = {}
if deleted:
query['deleted'] = 1
+ else:
+ query['deleted'] = 0
url = core.makeurl(self.apiurl, ['source', prj], query)
_file = core.http_GET(url)
def get_info(self, prj, pkg=None):
"""Get info dictionary, saved in description."""
description = self.get_description(prj, pkg)
+ if not description: return {}
try:
return json.loads(description)
except ValueError:
xml_root.append(link_element)
# Set linkedbuild attribute for all repositories
- for repo_element in xml_root.findall('repository'):
- repo_element.set('linkedbuild', linktype)
+ if linktype is not None:
+ for repo_element in xml_root.findall('repository'):
+ repo_element.set('linkedbuild', linktype)
self.set_meta(ElementTree.tostring(xml_root), project)
# remove repo
if not repo == None:
- for repo_element in xml_root.findall('repository'):
- if not repo_element.get('name') == repo:
- xml_root.remove(repo_element)
+ if type(repo) is str:
+ repo = [repo]
+ for repo_element in xml_root.findall('repository'):
+ if repo_element.get('name') not in repo:
+ xml_root.remove(repo_element)
# Set linkedbuild attribute for all repositories
for repo_element in xml_root.findall('repository'):
core.copy_pac(self.apiurl, src_project, src_package, self.apiurl, dst_project, dst_package,
client_side_copy,revision)
+    def create_copy_pac_from_remote(self, src_project, src_package, dst_project, dst_package):
+        """create a copy of package
+        Copying can be done by downloading the files from one package and commit
+        them into the other by uploading them (client-side copy) --
+        or by the server, in a single api call.
+        """
+        # Cross-instance copies must go through the client (download from the
+        # remote apiurl, upload to ours, expanding links); same-instance
+        # copies can be one server-side api call.
+        if self.apiurl != self.remote_apiurl:
+            client_side_copy = True
+            expand = True
+            keep_link = False
+        else:
+            client_side_copy = False
+            expand = False
+            keep_link = False
+
+        # Record the source revision in the commit comment for traceability.
+        rev = core.show_upstream_rev(self.remote_apiurl, src_project, src_package)
+        comment = 'copypac from project:%s package:%s revision:%s' % \
+                  ( src_project, src_package, rev )
+        if keep_link:
+            # NOTE(review): keep_link is False on both branches above, so
+            # this comment suffix is currently dead code.
+            comment += ", using keep-link"
+        if expand:
+            comment += ", using expand"
+        if client_side_copy:
+            comment += ", using client side copy"
+
+        core.copy_pac(self.remote_apiurl, src_project, src_package,
+                      self.apiurl, dst_project, dst_package,
+                      client_side_copy=client_side_copy,
+                      expand=expand,
+                      comment=comment,
+                      keep_link=keep_link)
+
def get_dependson(self, project, repo, arch, packages=None, reverse=None):
"""
get_dependson
"""
return core.show_upstream_rev(self.apiurl, project, package, revision, expand, linkrev, meta, include_service_files)
- def disable_build_flag(self, prj,repo, flag, status):
+ def disable_build_flag(self, prj, repo, flag, status):
"""disable build flag for the project """
#Started POST "/source/acl-link?cmd=set_flag&flag=build&status=disable"
query = { 'cmd': 'set_flag' }
return
+    def search_status_package(self, prj, interest='failed', pkg=None, repo=None, arch=None, flag=True):
+        """Map package name -> [{'repository':..,'arch':..}, ...] for every
+        build status whose code contains *interest* (substring match,
+        default 'failed').
+
+        NOTE(review): pkg, repo, arch and flag are never used — presumably
+        intended as filters; confirm before relying on them.
+        """
+        #u = core.show_results_meta(self.apiurl, prj)
+        u = core.show_prj_results_meta(self.apiurl, prj)
+        try:
+            tree = ElementTree.fromstring(''.join(u))
+        except:
+            # Parse failures are propagated unchanged to the caller.
+            raise
+        packages = {}
+        for result in tree.iter('result'):
+            for status in result.iter('status'):
+                # Substring match so e.g. 'failed' also hits composite codes.
+                if interest in status.attrib['code']:
+                    pkgname = status.attrib['package']
+                    list1 = []
+                    list1.append({'repository':result.get('repository'),\
+                                  'arch':result.get('arch')})
+                    # Append previous hits so earlier repo/arch pairs are kept.
+                    if packages.get(pkgname):
+                        for x in packages.get(pkgname):
+                            list1.append(x)
+                    packages[pkgname] = list1
+        return packages
+
def get_build_results(self, prj, view=None, code=None,):
""" get build results """
resultdata['buildstatus'] = summarylist
return resultdata
+    def get_last_submitter(self, prj, pkg, req_state=('accepted', 'new')):
+        """Return the submitter recorded in the newest matching request.
+
+        Walks the requests of prj/pkg in the given states and takes the
+        value after ': ' on the first description line of each; the last
+        request in the list wins.  HTML-escaped angle brackets are
+        unescaped so 'name &lt;mail&gt;' becomes 'name <mail>'.
+        Returns '' when no request matches.
+        """
+        u = core.get_request_list(self.apiurl, project=prj, package=pkg, req_state=req_state)
+        submitter = ''
+        for r in u:
+            # BUGFIX: original used no-op .replace('<','<')/.replace('>','>');
+            # the intent was to unescape HTML entities in the description.
+            submitter = r.description.splitlines()[0].split(': ')[1] \
+                .replace('&lt;', '<').replace('&gt;', '>')
+        return submitter
+
+    def get_source_file(self, prj, pkg, filename, targetfilename=None):
+        """Thin wrapper over osc core.get_source_file for prj/pkg/filename;
+        presumably the file is saved as targetfilename (defaulting to
+        filename) — see osc core docs to confirm."""
+        return core.get_source_file(self.apiurl, prj, pkg, filename, targetfilename=targetfilename)
+
+    def get_source_info(self, prj, pkg):
+        """ get source service info """
+        # GET /source/<prj>/<pkg> and return the raw body as one string.
+        # On HTTP errors the HTTPError object itself is returned, so callers
+        # must type-check the result.
+        u = core.makeurl(self.apiurl, ['source', prj, pkg])
+
+        try:
+            f = core.http_GET(u)
+            return ''.join(f.readlines())
+        except urllib2.HTTPError, err:
+            print err
+            return err
+
+    def set_build_for_repo(self, prj=None, repos=None, flag=None, mode=None):
+        """ control build flag """
+        # Per-repo enable/disable entries of the project's <build> meta are
+        # rewritten: existing arch-independent entries for the target repos
+        # are removed and one <mode repository=.../> element added per repo.
+        # NOTE(review): the 'flag' parameter is unused — 'build' is always
+        # the flag edited; confirm intended.
+        self.default_build_flag(prj, repo = None, flag='build')
+
+        # 'all' expands to every repository defined in the project meta.
+        if repos == 'all':
+            repos = self.get_repositories(prj)
+        elif type(repos) is not list:
+            repos = [repos]
+
+        kind = 'prj'
+        # Trailing comma is intentional: path_args must be a tuple.
+        path = core.quote_plus(prj),
+        data = core.meta_exists(metatype=kind,
+                                path_args=path,
+                                template_args=None,
+                                create_new=False)
+        if not data:
+            return
+
+        root = ElementTree.fromstring(''.join(data))
+
+        rm_items = []
+        for build in root.getiterator('build'):
+            for item in build.getiterator():
+                if item.tag not in ('disable', 'enable'):
+                    continue
+                # Only drop arch-independent entries for the target repos.
+                if item.get('repository') in repos \
+                   and item.get('arch') is None:
+                    rm_items.append(item)
+            for rm_item in rm_items:
+                build.remove(rm_item)
+            for repo in repos:
+                build.insert(100, ElementTree.Element(mode, repository=repo)) #100 for append to tail
+
+        core.edit_meta(metatype=kind,
+                       path_args=path,
+                       data=ElementTree.tostring(root))
+
def addPerson(self, prj, users):
"""
add persons to a project
path_args=path,
data=ElementTree.tostring(root))
+    def get_published_repos(self, prj):
+        """Return the names of the published repositories of project *prj*."""
+        url = core.makeurl(self.apiurl, ['published', prj])
+        response = core.http_GET(url)
+        xml_tree = ElementTree.parse(response)
+        return [entry.get('name') for entry in xml_tree.findall('entry')]
+
+    def get_build_repos(self, prj):
+        """Return the names of the build repositories of project *prj*."""
+        url = core.makeurl(self.apiurl, ['build', prj])
+        response = core.http_GET(url)
+        xml_tree = ElementTree.parse(response)
+        return [entry.get('name') for entry in xml_tree.findall('entry')]
+
+    def get_binarylist(self, project, repository, arch, package='', verbose=False):
+        """Thin wrapper over osc core.get_binarylist: list built binaries of
+        project/repository/arch (optionally restricted to one package)."""
+        return core.get_binarylist(self.apiurl, project, repository, arch, package=package, verbose=verbose)
+
def get_sourceinfo_list(self, prj):
"""
Get source info list of the project
{ package1: [],
- package2: ['Source Project'/'Source Package'], # if linked package
+ package2: ['Source Project'/'Source Package', 'Source Project2'/'Source Package2' ], # if linked package
...}
+ Note: If more than one linked packages found, use the last one.
"""
query = {}
query['view'] = 'info'
s_dict = {}
for s in root.iter('sourceinfo'):
s_dict[s.get('package')] = \
- ''.join([node.get('project') + '/' + node.get('package') for node in s.findall('linked')])
+ [node.get('project') + '/' + node.get('package') for node in s.findall('linked')]
return s_dict
def is_lock_project(self, project):
return
+    def get_project_list(self):
+        """Return the list of all project names known to this build service.
+
+        Raises:
+            ObsError: when the underlying API call fails for any reason.
+        """
+        try:
+            projects = core.meta_get_project_list(self.apiurl)
+        except Exception, err:
+            # BUGFIX: error message said "cat't"; also the old docstring
+            # claimed regexp matching although no pattern argument exists.
+            raise ObsError("can't get list of projects from %s: %s" %
+                           (self.apiurl, err))
+        return projects
+
+    def get_repositories(self, project):
+        """Return the repository names declared in *project*'s meta."""
+        meta = ''.join(core.show_project_meta(self.apiurl, project))
+        root = ElementTree.fromstring(meta)
+        return ['%s' % node.get('name') for node in root.findall('repository')]
+
+    def get_dependson_from_snapshot(self, url, project, repo, arch):
+        """
+        get revpkgdepends.xml from snapshot url
+        """
+        # Fetch <url>/builddata/depends/<project>_<repo>_<arch>_revpkgdepends.xml
+        # and return its raw text; None on HTTP errors (best effort).
+        u = '%s/%s/%s_%s_%s_%s' % (url, 'builddata/depends', project, repo, arch, 'revpkgdepends.xml')
+
+        try:
+            f = core.http_GET(u)
+            return ''.join(f.readlines())
+        except urllib2.HTTPError:
+            print 'get_dependson_from_snapshot http_GET(%s) error' % u
+            return None
+
+    def get_pkgrev_from_snapshot(self, url, project):
+        """
+        get pkgrevisions.xml from snapshot url
+        """
+        u = '%s/%s/%s_%s' % (url, 'builddata/depends', project, 'pkgrevisions.xml')
+        # NOTE(review): debug print left in; consider removing.
+        print u
+        try:
+            f = core.http_GET(u)
+            # SECURITY: eval() of data fetched over the network executes
+            # arbitrary code if the server is compromised; for plain Python
+            # literals ast.literal_eval is a safe drop-in — flagged for review.
+            return eval(''.join(f.readlines()))
+        except urllib2.HTTPError:
+            print 'get_pkgrev_from_snapshot http_GET(%s) error' % u
+            return None
+
+    def get_source_viewinfo(self, prj):
+        """Fetch the 'view=info' source listing of project *prj*.
+
+        Returns the open HTTP response object.  urllib2 errors are tagged
+        with an osc_msg attribute for nicer reporting and re-raised.
+        """
+        query = {}
+        query['view'] = 'info'
+        query['parse'] = 0
+        query['nofilename'] = 1
+
+        u = core.makeurl(self.apiurl, ['source', prj], query)
+        try:
+            return core.http_GET(u)
+        except (urllib2.URLError, urllib2.HTTPError), e:
+            e.osc_msg = 'could not get viewinfo for project \'%s\'' % (prj)
+            raise
+        # BUGFIX: removed trailing 'return' — it was unreachable (both the
+        # try branch returns and the except branch re-raises).
+
if not self.silent_mode:
self._cmd_run('review', args)
-def get_gerrit_event():
+    def ls_groups(self, args=[]):
+        """List Groups"""
+        output = self._cmd_run('ls-groups', args)
+        # Strip surrounding whitespace from every output line.
+        return [line.strip() for line in output]
+
+    def ls_members(self, args=[]):
+        """List Members"""
+        output = self._cmd_run('ls-members', args)
+        # Strip every line, then drop the first one (column header).
+        return [line.strip() for line in output][1:]
+
+def get_gerrit_event(env_switch=None):
""" get gerrit event info from environment parameters """
event = {}
if element.startswith('GERRIT_'):
event[element[len('GERRIT_'):].lower()] = os.getenv(element)
+ if env_switch:
+ for element in os.environ.keys():
+ if element.startswith(env_switch+'GERRIT_'):
+ event[element[len(env_switch+'GERRIT_'):].lower()] = os.getenv(element)
+
print '\nGerrit Event:\n', json.dumps(event, indent=4)
return event
+
+class GerritEnv(object):
+    """Gerrit connection settings read from environment variables.
+
+    *env_switch* is a name prefix ('' or e.g. 'PUBLIC_') selecting which
+    set of GERRIT_*/GIT_CACHE_DIR variables to read; any variable that is
+    not set yields None.
+    """
+
+    def __init__(self, env_switch):
+        # ssh host string, host name, user name and ssh port of gerrit,
+        # plus the local git cache directory.
+        self.host = os.getenv(env_switch+'GERRIT_HOST')
+        self.hostname = os.getenv(env_switch+'GERRIT_HOSTNAME')
+        self.username = os.getenv(env_switch+'GERRIT_USERNAME')
+        self.sshport = os.getenv(env_switch+'GERRIT_SSHPORT')
+        self.gitcache = os.getenv(env_switch+'GIT_CACHE_DIR')
+
localgit.fetch(all_remotes=True)
else:
localgit.fetch(tags=True)
- localgit.pull()
+ try:
+ localgit.pull()
+ except GitRepositoryError, err:
+ print('pull exception: ', err)
+ return True
except GitRepositoryError, gre:
print('git execption: ', gre)
shutil.rmtree(localdir)
return True
@retry()
-def _clone_gitproject(giturl, gerritprj, localdir, bare=False):
+def _clone_gitproject(giturl, gerritprj, localdir, bare=False, git_cache_dir=None):
"""Clone gerrit project from remote to local dir"""
result = True
return True
try:
- cache_dir = os.path.join(os.getenv('GIT_CACHE_DIR'), gerritprj) + '.git'
+ if git_cache_dir is None:
+ cache_dir = os.path.join(os.getenv('GIT_CACHE_DIR'), gerritprj) + '.git'
+ else:
+ cache_dir = os.path.join(git_cache_dir, gerritprj) + '.git'
if os.path.isdir(cache_dir):
# use local cache repo as reference to clone
return result
-def clone_gitproject(gerritprj, localdir, giturl=None, bare=False):
+def clone_gitproject(gerritprj, localdir, giturl=None, bare=False, gerrit_hostname=None, gerrit_username=None, gerrit_sshport=None, git_cache_dir=None):
     """Clone gerrit project from remote to local dir"""
     if not giturl:
-        giturl = 'ssh://%s@%s:%s' % (os.getenv('GERRIT_USERNAME'),
-                                     os.getenv('GERRIT_HOSTNAME'),
-                                     os.getenv('GERRIT_SSHPORT'))
+        # Prefer explicitly passed connection parameters; fall back to the
+        # GERRIT_* environment variables when any of them is missing.
+        if not gerrit_hostname or not gerrit_username or not gerrit_sshport:
+            giturl = 'ssh://%s@%s:%s' % (os.getenv('GERRIT_USERNAME'),
+                                         os.getenv('GERRIT_HOSTNAME'),
+                                         os.getenv('GERRIT_SSHPORT'))
+        else:
+            giturl = 'ssh://%s@%s:%s' % (gerrit_username,gerrit_hostname,gerrit_sshport)
-    return _clone_gitproject(giturl, gerritprj, localdir, bare)
+    # git_cache_dir is forwarded so the helper can use a custom cache repo.
+    return _clone_gitproject(giturl, gerritprj, localdir, bare, git_cache_dir)
-def fetch_change(gerritprj, localdir, refspec, giturl=None, bare=False):
+def fetch_change(gerritprj, localdir, refspec, giturl=None, bare=False, gerrit_hostname=None, gerrit_username=None, gerrit_sshport=None, git_cache_dir=None):
     """Fetch and checkout change to local dir"""
     if not giturl:
-        giturl = 'ssh://%s@%s:%s/%s' % (os.getenv('GERRIT_USERNAME'),
-                                        os.getenv('GERRIT_HOSTNAME'),
-                                        os.getenv('GERRIT_SSHPORT'),
-                                        gerritprj)
+        # Prefer explicitly passed connection parameters; fall back to the
+        # GERRIT_* environment variables when any of them is missing.
+        if not gerrit_hostname or not gerrit_username or not gerrit_sshport:
+            giturl = 'ssh://%s@%s:%s/%s' % (os.getenv('GERRIT_USERNAME'),
+                                            os.getenv('GERRIT_HOSTNAME'),
+                                            os.getenv('GERRIT_SSHPORT'),
+                                            gerritprj)
+        else:
+            # BUGFIX: format string had three placeholders but four args
+            # (TypeError at runtime) and dropped the project path segment.
+            giturl = 'ssh://%s@%s:%s/%s' % (gerrit_username, gerrit_hostname,
+                                            gerrit_sshport, gerritprj)
     git = Git.create(localdir, bare)
     git.fetch(repo=giturl, refspec=refspec)
     git.checkout('FETCH_HEAD')
+
return self.__get_mapping(path, branch) or []
return []
+    def get_contains_branch_mapping(self, project, branch=None):
+        """ Get contains branch mapping """
+        # Collect the branch of every explicit <project> entry matching
+        # *project*, then walk the default path mapping from the project's
+        # directory up to '/'; a path entry whose branch equals *branch*
+        # is appended and the accumulated list returned.  [] when nothing
+        # matches (or when no default mapping exists).
+        branchs = []
+
+        if self.mapping_obj.project:
+            for prj in self.mapping_obj.project:
+                if prj.name == project or prj.name == '/' + project:
+                    branchs.append(prj.branch)
+
+        if self.mapping_obj.default:
+            # Search in path list
+            prj_path = project
+            while True:
+                # parent directory of project/directory
+                prj_path = os.path.dirname(os.path.join('/', prj_path))
+
+                if prj_path == '/':
+                    for path in self.mapping_obj.default.path:
+                        if path.name == '/':
+                            if branch == path.branch:
+                                # BUGFIX: list.append() returns None, so the
+                                # original 'return branchs.append(...) or []'
+                                # always returned [] and dropped all matches.
+                                branchs.append(path.branch)
+                                return branchs
+                    return []
+
+                # Search path
+                for path in self.mapping_obj.default.path:
+                    # path match
+                    if os.path.dirname(os.path.join('/', path.name) +
+                                       '/') == prj_path:
+                        if branch == path.branch:
+                            branchs.append(path.branch)
+                            return branchs
+                        return []
+        return []
class MappingV2(object):
"""A class to handle mapping xml file """
return mapping
+    def get_mapping_list(self, obs_project, staging_project):
+        """Return [name, branch, OBS_project, OBS_staging_project,
+        OBS_package] rows for every project entry, optionally filtered by
+        obs_project and/or staging_project.  Empty list when the mapping
+        is disabled via configure.enable == 'false'.
+        """
+        mapping = []
+
+        if self.mapping_obj.configure:
+            # if configure.enable is false then return []
+            if self.mapping_obj.configure.enable == 'false':
+                return mapping
+
+        if self.mapping_obj.branch:
+            for brch in self.mapping_obj.branch:
+                # Skip branch entries not matching the requested filters.
+                if (obs_project and brch.OBS_project != obs_project) or (staging_project and brch.OBS_staging_project != staging_project):
+                    continue
+                for prj in brch.project:
+                    mapping.append(self.__encode_to_ascii([prj.name, brch.name, brch.OBS_project, brch.OBS_staging_project, prj.OBS_package]))
+            return mapping
+
+        return mapping
+
+    def get_contains_branch_mapping(self, project, branch=None):
+        """ Get contains branch mapping """
+        # A branch entry may hold a comma-separated group of branch names;
+        # when the group contains *branch* and lists *project*, every name
+        # of that group is returned.  Empty list when the mapping is
+        # disabled or nothing matches.
+        branchs = []
+        if self.mapping_obj.configure:
+            # if configure.enable is false then return []
+            if self.mapping_obj.configure.enable == 'false':
+                return branchs
+
+        if self.mapping_obj.branch:
+            for brch in self.mapping_obj.branch:
+                for prj in brch.project:
+                    if (prj.name == project or prj.name == '/' + project) and (branch in brch.name.split(",") ):
+                        for b in brch.name.split(","):
+                            branchs.append(b)
+
+        return branchs
def git_obs_map(gerrit_prj, gerrit_branch=None, gitcache=None, \
gerrit_hostname=None, gerrit_username=None, gerrit_sshport=None, \
if not os.path.isfile(mapping_path_v1):
print 'Cloning %s' % mapping_prj
if not clone_gitproject(mapping_prj, \
- os.path.join(git_cache, mapping_prj)):
+ os.path.join(git_cache, mapping_prj), \
+ gerrit_hostname=gerrit_hostname, gerrit_username=gerrit_username, gerrit_sshport=gerrit_sshport):
raise MappingError('Error cloning %s' % mapping_prj)
# get mappings v2
for file in mapping_v2_file_lists:
mymapping_v2 = MappingV2(file)
- obs_prjs.extend(mymapping_v2.get_submit_mapping(gerrit_prj, gerrit_branch,
+ obs_prjs.extend(mymapping_v2.get_submit_mapping(gerrit_prj,
+ gerrit_branch,
include_config=include_config))
# remove overlapped items
found.append([item.name, item.OBS_project, item.OBS_staging_project])
return found
+def git_contains_branch_map(gerrit_prj, gerrit_branch=None, gitcache=None, \
+                            gerrit_hostname=None, gerrit_username=None, gerrit_sshport=None):
+    """
+    Find an branch by parsing git-obs-mapping.xml.
+
+    Clones the mapping project (env MAPPING_PRJ) into the git cache when
+    needed, then merges v1 (git-obs-mapping.xml) and v2 (profiles/*.xml)
+    contains-branch results, de-duplicated, order preserved.
+    """
+    def remove_overlaps(orig_list):
+        """Return orig_list without duplicates, keeping first occurrences."""
+        result = []
+        [result.append(obj) for obj in orig_list if obj not in result]
+        return result
+
+    def get_xml_file_list(path):
+        # Collect all *.xml files directly joined onto *path*.
+        # NOTE(review): files found in subdirectories are joined onto the
+        # top-level path, not onto root — confirm this is intended.
+        file_list = []
+        for root, dirs, files in os.walk(path):
+            for file in files:
+                if file.endswith('.xml'):
+                    file_list.append(os.path.join(path, file))
+        return file_list
+
+    if gitcache:
+        git_cache = gitcache
+    else:
+        git_cache = os.getenv("GIT_CACHE_DIR")
+
+    mapping_prj = os.getenv("MAPPING_PRJ")
+
+    git_obs_mapping_path = os.path.join(git_cache, mapping_prj)
+    mapping_path_v1 = '{0}/git-obs-mapping.xml'.format(git_obs_mapping_path)
+
+    # Clone the mapping project on first use.
+    if not os.path.isfile(mapping_path_v1):
+        print 'Cloning %s' % mapping_prj
+        if not clone_gitproject(mapping_prj, \
+                os.path.join(git_cache, mapping_prj), \
+                gerrit_hostname=gerrit_hostname, gerrit_username=gerrit_username, gerrit_sshport=gerrit_sshport):
+            raise MappingError('Error cloning %s' % mapping_prj)
+
+    # get mappings v1
+    mymapping = Mapping(mapping_path_v1)
+    branchs = mymapping.get_contains_branch_mapping(gerrit_prj, gerrit_branch)
+
+    # get v2 mapping files list
+    mapping_path_v2 = '{0}/profiles/'.format(git_obs_mapping_path)
+    mapping_v2_file_lists = get_xml_file_list(mapping_path_v2)
+
+    # get mappings v2
+    for file in mapping_v2_file_lists:
+        mymapping_v2 = MappingV2(file)
+        branchs.extend(mymapping_v2.get_contains_branch_mapping(gerrit_prj,
+                                                                gerrit_branch))
+    branchs = remove_overlaps(branchs)
+    return branchs
+
+def git_obs_map_full_list(obs_project=None, staging_project=None, gitcache=None, \
+                          gerrit_hostname=None, gerrit_username=None, gerrit_sshport=None):
+    """
+    Find the OBS project(s) corresponding to Gerrit project and branch
+    by parsing git-obs-mapping.xml.
+
+    Only v2 mapping files (profiles/*.xml) are consulted; returns the
+    de-duplicated rows from MappingV2.get_mapping_list, optionally
+    filtered by obs_project / staging_project.
+    """
+
+    def remove_overlaps(orig_list):
+        """Return orig_list without duplicates, keeping first occurrences."""
+        result = []
+        [result.append(obj) for obj in orig_list if obj not in result]
+        return result
+
+    def get_xml_file_list(path):
+        # Collect all *.xml files directly joined onto *path*.
+        file_list = []
+        for root, dirs, files in os.walk(path):
+            for file in files:
+                if file.endswith('.xml'):
+                    file_list.append(os.path.join(path, file))
+        return file_list
+
+    if gitcache:
+        git_cache = gitcache
+    else:
+        git_cache = os.getenv("GIT_CACHE_DIR")
+
+    mapping_prj = os.getenv("MAPPING_PRJ")
+
+    git_obs_mapping_path = os.path.join(git_cache, mapping_prj)
+    mapping_path_v1 = '{0}/git-obs-mapping.xml'.format(git_obs_mapping_path)
+
+    # Clone the mapping project on first use (v1 file presence is the probe).
+    if not os.path.isfile(mapping_path_v1):
+        print 'Cloning %s' % mapping_prj
+        if not clone_gitproject(mapping_prj, \
+                os.path.join(git_cache, mapping_prj), \
+                gerrit_hostname=gerrit_hostname, gerrit_username=gerrit_username, gerrit_sshport=gerrit_sshport):
+            raise MappingError('Error cloning %s' % mapping_prj)
+
+    obs_prjs = []
+
+    # get v2 mapping files list
+    mapping_path_v2 = '{0}/profiles/'.format(git_obs_mapping_path)
+    mapping_v2_file_lists = get_xml_file_list(mapping_path_v2)
+
+    # get mappings v2
+    for file in mapping_v2_file_lists:
+        mymapping_v2 = MappingV2(file)
+        obs_prjs.extend(mymapping_v2.get_mapping_list(obs_project, staging_project))
+
+    # remove overlapped items
+    obs_prjs = remove_overlaps(obs_prjs)
+
+    return obs_prjs
+
+def get_ref_map(gerrit_prj, gerrit_branch=None, gitcache=None, \
+                gerrit_hostname=None, gerrit_username=None, gerrit_sshport=None):
+    """
+    Find the OBS project(s) corresponding to Gerrit project and branch
+    by parsing git-ref-mapping.xml.
+
+    Uses the REF_MAPPING_PRJ project (cloned into the git cache when
+    missing) and the v1 Mapping parser only.
+    """
+
+    if gitcache:
+        git_cache = gitcache
+    else:
+        git_cache = os.getenv("GIT_CACHE_DIR")
+
+    mapping_prj = os.getenv("REF_MAPPING_PRJ")
+
+    git_ref_mapping_path = os.path.join(git_cache, mapping_prj)
+    mapping_path = '{0}/git-ref-mapping.xml'.format(git_ref_mapping_path)
+
+    # Clone the ref-mapping project on first use.
+    if not os.path.isfile(mapping_path):
+        print 'Cloning %s' % mapping_prj
+        if not clone_gitproject(mapping_prj, \
+                os.path.join(git_cache, mapping_prj), \
+                gerrit_hostname=gerrit_hostname, gerrit_username=gerrit_username, gerrit_sshport=gerrit_sshport):
+            raise MappingError('Error cloning %s' % mapping_prj)
+
+    # get mappings
+    mymapping = Mapping(mapping_path)
+    obs_prjs = mymapping.get_submit_mapping(gerrit_prj, gerrit_branch)
+
+    return obs_prjs
+
from gitbuildsys.errors import ObsError
from common import utils
-from common.mapping import git_obs_map
+from common.mapping import git_obs_map, git_contains_branch_map
from common.git import Git, clone_gitproject
from common.upload_service import upload_obs_service, UploadError
from common.gerrit import is_ref_deleted
UNKNOWN_FORMAT_MSG = '- Unknown tag format,\n please follow the format '\
'submit/{version}/{date.time}.'
+WRONG_FORMAT_MSG = '- Wrong tag format,\n please follow the format '\
+ 'submit/{branch}/{date.time}. \n'\
+ 'Your branch : %s. Git-obs-mapping branch: %s'
+
NOT_ANNOTATED_MSG = '- Tag should be annotated tag.'
SUGGESTION = 'Suggest to use "gbs submit" to trigger submission\n'\
return tag
-def check_tag_format(git, mygerrit, event, tag):
+def check_tag_format(git, mygerrit, event, tag, mappingbranchs):
"""check whether tag follow proper format"""
branch, date = parse_submit_tag(tag)
else:
# cannot find tagged commit in git tree or gerrit open change
message.append(WRONG_COMMIT_MSG % psr)
+
+ contain = False
+ containsbranch = git.branch_contains(tag)
+ for b in containsbranch:
+ if b in mappingbranchs:
+ contain = True
+ if not contain:
+ # wrong tag format
+ message.append(WRONG_FORMAT_MSG % (containsbranch,mappingbranchs))
+
else:
# wrong tag format
message.append(UNKNOWN_FORMAT_MSG)
sourceinfo = build.get_sourceinfo_list(obs_target_prj)
for package in sourceinfo:
if sourceinfo[package]:
- link_prj, link_pkg = sourceinfo[package].split('/')
+ link_prj, link_pkg = sourceinfo[package][-1].split('/')
if link_prj == obs_target_prj and link_pkg == pre_package:
build.create_link_pac(obs_pre_prj, pre_package, \
obs_pre_prj, package)
if gerrit_account_email:
submitter += ' <%s>' % gerrit_account_email
+ mappingbranchs = git_contains_branch_map(event['project'], git_branch, \
+ gitcache=git_cache, \
+ gerrit_hostname=event['hostname'], \
+ gerrit_username=event['username'], \
+ gerrit_sshport=event['sshport']
+ )
+
+ # precheck tpk branch (ABS)
+ if '_tpk' in git_branch:
+ mappingbranchs.append(git_branch)
+ print 'mapping branch = %s ' %(mappingbranchs)
+
# check whether tag meet format
- if not check_tag_format(mygit, mygerrit, event, tag):
+ if not check_tag_format(mygit, mygerrit, event, tag, mappingbranchs):
print 'The check for the tag format is error, exit now\n'
return 0