if args.ccache:
cmd += ['--ccache']
- if args.pkg_ccache:
- cmd += ['--pkg-ccache=%s' % args.pkg_ccache]
-
if args.extra_packs:
cmd += ['--extra-packs=%s' % args.extra_packs]
while not stop.is_set():
# Wait before printing to avoid output on short wait
stop.wait(1)
- if not stop.is_set():
- sys.stderr.write('.')
- sys.stderr.flush()
- printed.set()
+# if not stop.is_set():
+# sys.stderr.write('.')
+# sys.stderr.flush()
+# printed.set()
stop = threading.Event()
printed = threading.Event()
return func(*args, **kwargs)
finally:
stop.set()
- if printed.is_set():
- sys.stderr.write('\n')
+# if printed.is_set():
+# sys.stderr.write('\n')
return _wait_with_print
LOGGER.setLevel(INFO)
# Set output format and verbosity for gbp
    if debug:
log_fmt = '%(color)s%(name)s:%(levelname)s: %(coloroff)s%(message)s'
gbp.log.LOGGER.setLevel(DEBUG)
    else:
log_fmt = '%(color)s%(levelname)s: %(coloroff)s%(message)s'
LOGGER.set_format(log_fmt)
if hasattr(gbp.log, 'initialize'):
Parse build.xml.
Returns: dictionary with buildconf, repos and archs.
"""
- if not (build_xml and os.path.exists(build_xml)):
- return
+ return
+# if not (build_xml and os.path.exists(build_xml)):
+# return
- try:
- etree = ET.parse(build_xml)
- except ET.ParseError:
- log.warning('Not well formed xml: %s' % build_xml)
- return
+# try:
+# etree = ET.parse(build_xml)
+# except ET.ParseError:
+# log.warning('Not well formed xml: %s' % build_xml)
+# return
- meta = {}
- root = etree.getroot()
+# meta = {}
+# root = etree.getroot()
# Get version of build.xml
- build_version = root.get('version')
+# build_version = root.get('version')
# It's new format of repo structure if 'version' exists
- if build_version:
- log.warning('new format repo structure has not been supportted '
- 'well, please upgrade your gbs to latest version')
- return None
+# if build_version:
+# log.warning('new format repo structure has not been supportted '
+# 'well, please upgrade your gbs to latest version')
+# return None
- buildelem = root.find('buildconf')
+# buildelem = root.find('buildconf')
# Must using None here, "if buildelem" is wrong
# None means item does not exist
# It's different from bool(buildelem)
- if buildelem is not None:
- meta['buildconf'] = buildelem.text.strip()
-
- repo_items = root.find('repos')
- if repo_items is not None:
- meta['repos'] = [repo.text.strip()
- for repo in repo_items.findall('repo')]
-
- arch_items = root.find('archs')
- if arch_items is not None:
- meta['archs'] = [arch.text.strip()
- for arch in arch_items.findall('arch')]
- id_item = root.find('id')
- if id_item is not None:
- meta['id'] = id_item.text.strip()
+# if buildelem is not None:
+# meta['buildconf'] = buildelem.text.strip()
- return meta
+# repo_items = root.find('repos')
+# if repo_items is not None:
+# meta['repos'] = [repo.text.strip()
+# for repo in repo_items.findall('repo')]
+
+# arch_items = root.find('archs')
+# if arch_items is not None:
+# meta['archs'] = [arch.text.strip()
+# for arch in arch_items.findall('arch')]
+# id_item = root.find('id')
+# if id_item is not None:
+# meta['id'] = id_item.text.strip()
+
+# return meta
def build_repos_from_buildmeta(self, baseurl, meta):
"""Parse build.xml and pickup standard repos it contains."""
Returns: file name if fetch succeds, else None.
"""
fname = os.path.join(self.cachedir, os.path.basename(url))
-
try:
self.urlgrabber.grab(url, fname, url.user, url.passwd, no_cache)
except PageNotFound:
if self.buildconf:
return
- latest_repo_url = repo.pathjoin('../../../../')
- if latest_repo_url.find('../') >= 0:
- return
- meta = self._fetch_build_meta(latest_repo_url)
- if meta:
- self._fetch_build_conf(latest_repo_url, meta)
- return
+ #latest_repo_url = repo.pathjoin('../../../../')
+ #if latest_repo_url.find('../') >= 0:
+ # return
+ #meta = self._fetch_build_meta(latest_repo_url)
+ #if meta:
+ # self._fetch_build_conf(latest_repo_url, meta)
+ #return
# Check if it's repo with builddata/build.xml exist
meta = self._fetch_build_meta(repo)
class GitRefMappingParser(object):
"""git-ref-mapping parser for get reference binary id."""
- def __init__(self, giturl='https://git.tizen.org/cgit/scm/git-ref-mapping'):
+ def __init__(self, giturl='ssh://wangbiao@review.tizen.org:29418/scm/git-ref-mapping'):
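+        # NOTE: the default giturl embeds a specific user's Gerrit SSH account;
+        # pass giturl explicitly to clone with different credentials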
self._giturl = giturl
def parse(self):
refxml = os.path.expanduser('~/.ref-gbs/git-ref-mapping/git-ref-mapping.xml')
if os.path.exists(refxml):
cmd = ['git', 'pull']
        else:
            if not os.path.exists(workdir):
                os.makedirs(workdir)
            cmd = ['git', 'clone', self._giturl, workdir]
try:
with Workdir(workdir):
output = subprocess.Popen(cmd, stdout=subprocess.PIPE)
log.info('the following uncommitted changes would be included'
':\n %s' % '\n '.join(uncommitted_files))
-def hexdigest(fhandle, block_size=4096):
- """Calculate hexdigest of file content."""
- md5obj = hashlib.new('md5')
- while True:
- data = fhandle.read(block_size)
- if not data:
- break
- md5obj.update(data)
- return md5obj.hexdigest()
-
-
-def show_file_from_rev(git_path, relative_path, commit_id):
- """Get a single file content from given git revision."""
- args = ['git', 'show', '%s:%s' % (commit_id, relative_path)]
- try:
- with Workdir(git_path):
- return subprocess.Popen(args,
- stdout=subprocess.PIPE).communicate()[0]
- except (subprocess.CalledProcessError, OSError) as err:
- log.debug('failed to checkout %s from %s:%s' % (relative_path,
- commit_id, str(err)))
- return None
+#def hexdigest(fhandle, block_size=4096):
+# """Calculate hexdigest of file content."""
+# md5obj = hashlib.new('md5')
+# while True:
+# data = fhandle.read(block_size)
+# if not data:
+# break
+# md5obj.update(data)
+# return md5obj.hexdigest()
+
+
+#def show_file_from_rev(git_path, relative_path, commit_id):
+# """Get a single file content from given git revision."""
+# args = ['git', 'show', '%s:%s' % (commit_id, relative_path)]
+# try:
+# with Workdir(git_path):
+# return subprocess.Popen(args,
+# stdout=subprocess.PIPE).communicate()[0]
+# except (subprocess.CalledProcessError, OSError) as err:
+# log.debug('failed to checkout %s from %s:%s' % (relative_path,
+# commit_id, str(err)))
+# return None
def file_exists_in_rev(git_path, relative_path, commit_id, dir_only=False):
def glob_in_rev(git_path, pattern, commit_id):
"""Glob pattern in given revision."""
-
path = os.path.dirname(pattern)
args = ['git', 'ls-tree', '--name-only', commit_id, '%s/' % path]
def edit(initial_content=None):
"""
Launch an editor to get input from user.
    Returns: content of user input.
"""
editor = get_editor_cmd()
temp = TempCopy(initial_content)
- subprocess.call('%s %s' % (editor, temp.name), shell=True)
+ #subprocess.call('%s %s' % (editor, temp.name), shell=True)
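+    # NOTE: with the editor call disabled above, the temporary copy is never
+    # modified, so no edited content is picked up below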
if temp.is_changed():
with open(temp.name) as fobj:
import gitbuildsys.conf
#after python3.0, reload has been moved to imp std lib.
from imp import reload
+from gitbuildsys.conf import MappingConfigParser
FILE_DIRNAME = os.path.dirname(os.path.abspath(__file__))
'project1.ini'))
self.assertEqual('homev2', self.get('section', 'home_only_key'))
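+
+    # NOTE: the MappingConfigParser tests below assume a mapping configuration
+    # file is installed at /usr/share/gbs/mapping.conf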
+ def test_MappingConfigParser_obs_meta_(self):
+ '''test get obs meta by class MappingConfigParser'''
+ mapparser = MappingConfigParser('/usr/share/gbs/mapping.conf')
+ obs_meta = mapparser.GetObsMapping()
+
+    def test_MappingConfigParser_prefix_meta_(self):
+        '''test get prefix meta by class MappingConfigParser'''
+        mapparser = MappingConfigParser('/usr/share/gbs/mapping.conf')
+        prefix_meta = mapparser.GetPrefixMapping()
+
+    def test_MappingConfigParser_repo_meta_(self):
+        '''test get repo meta by class MappingConfigParser'''
+        mapparser = MappingConfigParser('/usr/share/gbs/mapping.conf')
+        repo_meta = mapparser.GetRepoMapping()
+
+    def test_MappingConfigParser_profile_meta_(self):
+        '''test get profile meta by class MappingConfigParser'''
+        mapparser = MappingConfigParser('/usr/share/gbs/mapping.conf')
+        profile_meta = mapparser.GetProfileMapping()
+
+    def test_MappingConfigParser_source_meta_(self):
+        '''test get source meta by class MappingConfigParser'''
+        mapparser = MappingConfigParser('/usr/share/gbs/mapping.conf')
+        source_meta = mapparser.GetSourceMapping()
+
+ def test_MappingConfigParser_get_prefix_mapping_(self):
+ '''test get prefix meta by class MappingConfigParser'''
+ mapparser = MappingConfigParser('/usr/share/gbs/mapping.conf')
+ mapparser.GetPrefixMapping()
+
+ def test_MappingConfigParser_get_repo_mapping_(self):
+ '''test get repo meta by class MappingConfigParser'''
+ mapparser = MappingConfigParser('/usr/share/gbs/mapping.conf')
+ obs_meta = mapparser.GetRepoMapping()
+
+ def test_MappingConfigParser_get_profile_mapping_(self):
+ '''test get profile meta by class MappingConfigParser'''
+ mapparser = MappingConfigParser('/usr/share/gbs/mapping.conf')
+ obs_meta = mapparser.GetProfileMapping()
+
+ def test_MappingConfigParser_get_source_mapping_(self):
+ '''test get source meta by class MappingConfigParser'''
+ mapparser = MappingConfigParser('/usr/share/gbs/mapping.conf')
+ obs_meta = mapparser.GetSourceMapping()
+
+ def test_MappingConfigParser_get_osc_mapping_(self):
+ '''test get osc meta by class MappingConfigParser'''
+ mapparser = MappingConfigParser('/usr/share/gbs/mapping.conf')
+ obs_meta = mapparser.GetOscMapping()
+
if __name__ == '__main__':
unittest.main()
-"""Functionality tests for gbs export."""
+"""Functionality tests for gbsexport."""
import unittest
import imp
import os
import shutil
import tempfile
-from nose.tools import eq_
+from nose.tools import raises, eq_
GBS = imp.load_source("gbs", "./tools/gbs").main
except SystemExit as err:
eq_(err.code, 2)
+    @raises(Exception)
+    def test_command_export_special_spec(self):
+        """Test that gbs export --include-all on a special spec raises an exception"""
+        # assumes a package tree prepared for this test at /home/build/acl
+        os.chdir('/home/build/acl')
+        GBS(argv=["gbs", "export", "--include-all", "-o", "/tmp"])
"""Test raising exception when running gbs with not existing path."""
GBS(argv=["gbs", "import", "I don't exist!"])
+ @raises(GbsError)
+ def test_log_clone(self):
+        """Test the waiting function with the gbs clone command"""
+ GBS(argv=["gbs", "clone", "I don't exist!"])
--- /dev/null
+#!/usr/bin/python -tt
+# vim: ai ts=4 sts=4 et sw=4
+#
+# Copyright (c) 2012 Intel, Inc.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+# for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 59
+# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+"""Unit tests for utils library"""
+
+import unittest
+from gitbuildsys.safe_url import SafeURL
+from gitbuildsys.utils import URLGrabber
+from gitbuildsys.utils import Temp, RepoParser, edit_file
+from gitbuildsys.utils import GitRefMappingParser, GerritNameMapper
+import os
+import gitbuildsys.conf
+#after python3.0, reload has been moved to imp std lib.
+from imp import reload
+from gitbuildsys.conf import configmgr
+import requests
+
+class UtilsTest(unittest.TestCase):
+    '''Test cases for helpers in gitbuildsys.utils'''
+
+ def test_urlgrabber(self):
+ '''test urlgrabber class'''
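+        # only verifies that URLGrabber can be constructed; no download is attempted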
+ self.urlgrabber = URLGrabber()
+ #self.urlgrabber.perform()
+
+ def test_repoparser(self):
+ '''test repoparser'''
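+        # assumes the current gbs profile defines repos that are reachable over the network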
+ reload(gitbuildsys.conf)
+ profile = gitbuildsys.conf.configmgr.get_current_profile()
+        TMPDIR = os.path.join(configmgr.get('tmpdir', 'general'), 'test-gbs')
+ cache = Temp(prefix=os.path.join(TMPDIR, 'gbscache'),
+ directory=True)
+ cachedir = cache.path
+ repos = [i.url for i in profile.repos]
+ repoparser = RepoParser(repos, cachedir)
+ repourls = repoparser.get_repos_by_arch('armv7l')
+
+ def test_given_profile(self):
+        '''test RepoParser with a given profile config file'''
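+        # may require network access to the repos defined in testdata/base.cfg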
+ FILE_DIRNAME = os.path.dirname(os.path.abspath(__file__))
+ conf_file = os.path.join(FILE_DIRNAME, 'testdata', 'base.cfg')
+ reload(gitbuildsys.conf)
+ configmgr.add_conf(conf_file)
+ profile = configmgr.build_profile_by_name('profile.tizen')
+        TMPDIR = os.path.join(configmgr.get('tmpdir', 'general'), 'test-gbs')
+ cache = Temp(prefix=os.path.join(TMPDIR, 'gbscache'),
+ directory=True)
+ cachedir = cache.path
+ repos = [i.url for i in profile.repos]
+ repoparser = RepoParser(repos, cachedir)
+ repourls = repoparser.get_repos_by_arch('armv7l')
+
+ def test_edit_file(self):
+        '''test the edit_file function'''
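+        # the relative path below assumes the tests run from the top of the gbs source tree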
+ os.system('cp ./tests/testdata/fake.spec editfile')
+ edit_file('editfile')
+
+ def test_GitRefMappingParser(self):
+ '''test class GitRefMappingParser'''
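+        # assumes access to the configured git-ref-mapping repository (SSH by default)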
+ refparser = GitRefMappingParser()
+ ref_meta = refparser.parse()
+
+ def test_GerritNameMapper(self):
+        '''test class GerritNameMapper'''
+
+ profile = configmgr.get_current_profile()
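+        # assumes network access to download.tizen.org for the dependency XML and repo metadata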
+        profile_pkgs_url = 'http://download.tizen.org/releases/milestone/tizen/unified/latest/builddata/depends/Tizen:Unified_standard_armv7l_revpkgdepends.xml'
+        r = requests.get(profile_pkgs_url)
+ USERID = 'test'
+ TMPDIR = os.path.join(configmgr.get('tmpdir', 'general'), '%s-gbs' % USERID)
+ cache = Temp(prefix=os.path.join(TMPDIR, 'gbscache'), directory=True)
+ cachedir = cache.path
+ profile_repo = 'http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/'
+
+ repoparser = RepoParser([SafeURL(profile_repo)], cachedir)
+
+ gnmapper = GerritNameMapper(r.content, repoparser.primaryxml)
+ gnmapper.get_gerritname_by_srcname('rpm')
+ gnmapper.get_gerritname_by_obsname('rpm')
+ gnmapper.get_pkgname_by_srcname('rpm')
+
+
+FILE_DIRNAME = os.path.dirname(os.path.abspath(__file__))
+
+
+class Fixture(object):
+ '''test fixture for testing config'''
+
+ PATH = os.path.join(FILE_DIRNAME, 'testdata', 'ini')
+
help='working in offline mode. Start building directly')
group.add_argument('--ccache', action="store_true",
help='use ccache to speed up rebuilds')
- group.add_argument('--pkg-ccache',
- help='set ccache.tar file and enable ccache option '
- ', use ccache.tar file to speed up rebuilds')
group.add_argument('--icecream', type=int, default=0,
help='Use N parallel build jobs with icecream')
group.add_argument('--threads', type=int, default=1,