#!/usr/bin/python
-# Copyright (c) 2000 - 2016 Samsung Electronics Co., Ltd. All rights reserved.
+# Copyright (c) 2016 Samsung Electronics Co., Ltd
#
-# Contact:
-# @author Chulwoo Shin <cw1.shin@samsung.com>
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
+# Licensed under the Flora License, Version 1.1 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://floralicense.org/license/
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
from tic.utils.error import TICError
from tic.utils.grabber import myurlgrab2
from tic.utils import misc
+from tic.config import configmgr
+
+REPOMD_EL_PRIMARY = 'primary'
+REPOMD_EL_PATTERNS = 'patterns'
+REPOMD_EL_COMPS = 'comps'
+REPOMD_EL_GROUP = 'group'
+REPOMD_EL_TYPE = 'type'
+REPOMD_ATTRIB_LOCATION = '%slocation'
+REPOMD_ATTRIB_OPEN_CHECKSUM = '%sopen-checksum'
def _get_uncompressed_data_from_url(url, filename, proxies=None):
# download file
filename = os.path.splitext(filename)[0]
return filename
-def _get_metadata_from_repo(baseurl, proxies, tempdir, cachedir, reponame, filehref,
+def _get_repodata(baseurl, proxies, tempdir, cachedir, reponame, filehref,
sumtype=None, checksum=None):
logger = logging.getLogger(__name__)
url = os.path.join(baseurl, filehref)
return file.copyfile_flock(file_path, filename)
def get_repodata_from_repos(repos, cachedir):
- my_repodata = []
+ logger = logging.getLogger(__name__)
+
+ def _set_attrib(ns, key, element):
+ fpath_info[key] = element.find(''.join([ns, 'location'])).attrib['href']
+ checksum = element.find(''.join([ns, 'open-checksum']))
+ checksum_info[key] = checksum.text
+ sumtype_info[key] = checksum.attrib['type']
+
+ repodata = []
temp_path = os.path.join(cachedir, 'temp', str(misc.get_timestamp()))
for repo in repos:
reponame = repo.get('name')
repomd_file = os.path.join(temp_dir, 'repomd.xml')
file.make_dirs(temp_dir);
- #TODO: support proxy
+ #TODO: support local files(local directory)
+ # local/remote repository
url = os.path.join(baseurl, 'repodata/repomd.xml')
repomd = myurlgrab2(url, repomd_file)
try:
tree = etree.parse(repomd)
- root = tree.getroot()
- except etree.XMLSyntaxError:
- raise TICError("repomd.xml syntax error.")
+ t_root = tree.getroot()
+ except etree.XMLSyntaxError as e:
+ logger.info(e)
+ raise TICError(configmgr.message['xml_parse_error'] % ('repomd.xml', url))
# make cache_dir
- repo_checksum = hashlib.sha256(open(repomd_file, 'rb').read()).hexdigest();
+ repo_checksum = hashlib.sha256(open(repomd_file, 'rb').read()).hexdigest()
cache_dir = os.path.join(cachedir, 'cached', base64url, repo_checksum)
file.make_dirs(cache_dir)
+
+ fpath_info = dict()
+ checksum_info = dict()
+ sumtype_info = dict()
- ns = root.tag
- ns = ns[0:ns.rindex("}")+1]
-
- filepaths = {}
- checksums = {}
- sumtypes = {}
-
- for elm in root.findall("%sdata" % ns):
- if elm.attrib['type'] == 'patterns':
- filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
- checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
- sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
- elif elm.attrib['type'] == 'group':
- filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
- checksums['comps'] = elm.find("%sopen-checksum" % ns).text
- sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
- elif elm.attrib["type"] == 'primary':
- filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
- checksums['primary'] = elm.find("%sopen-checksum" % ns).text
- sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
+ namespace = t_root.tag
+ namespace = namespace[0:namespace.rindex('}')+1]
+
+ for element in t_root.findall(''.join([namespace, 'data'])):
+ if element.attrib[REPOMD_EL_TYPE] == REPOMD_EL_GROUP:
+ # group(comps)
+ _set_attrib(namespace, REPOMD_EL_COMPS, element)
+ else:
+ # type: primary, patterns
+ _set_attrib(namespace, element.attrib[REPOMD_EL_TYPE], element)
- for item in ("primary", "patterns", "comps"):
- if item not in filepaths:
- filepaths[item] = None
- continue
- filepaths[item] = _get_metadata_from_repo(baseurl,
- None,
- temp_dir,
- cache_dir,
- reponame,
- filepaths[item],
- sumtypes[item],
- checksums[item])
- my_repodata.append({"name":reponame,
+ for i_name in [REPOMD_EL_PRIMARY, REPOMD_EL_PATTERNS, REPOMD_EL_COMPS]:
+ if i_name in fpath_info:
+ fpath_info[i_name] = _get_repodata(baseurl,
+ None,
+ temp_dir,
+ cache_dir,
+ reponame,
+ fpath_info[i_name],
+ sumtype_info[i_name],
+ checksum_info[i_name])
+ else:
+ fpath_info[i_name] = None
+
+ repodata.append({"name":reponame,
"baseurl":baseurl,
+ "checksum":repo_checksum,
"repomd":repomd,
- "primary":filepaths['primary'],
+ "primary":fpath_info['primary'],
"cachedir":cache_dir,
"proxies":None,
- "patterns":filepaths['patterns'],
- "comps":filepaths['comps']})
- return my_repodata
+ "patterns":fpath_info['patterns'],
+ "comps":fpath_info['comps']})
+ return repodata
RepoType = collections.namedtuple('Repo', 'name, url')