# Contributors:
# - S-Core Co., Ltd
+import logging
import os
import base64
import collections
def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    """Fetch a repodata metadata file, reusing a cached copy when possible.

    Builds the download URL from *baseurl* and *filename*, and the local
    cache path ``<cachedir>/<reponame>/<basename(filename)>``.  For
    compressed metadata (``.gz``/``.bz2``), if the cached file's checksum
    matches *checksum*, the cached *filename* is returned without a
    re-download; otherwise the file is fetched and uncompressed via
    ``_get_uncompressed_data_from_url``.

    NOTE(review): this chunk was a mangled diff — stray '+' markers were
    removed and the indentation reconciled.  ``result`` below is referenced
    but never assigned in the visible lines (presumably it held the output
    of a ``<sumtype>sum`` command in the lost lines); confirm against the
    full source before relying on the checksum branch.
    """
    logger = logging.getLogger(__name__)
    url = os.path.join(baseurl, filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        # TODO(review): restore the checksum computation — `result` is
        # undefined here, so this branch raises NameError if reached.
        file_checksum = result.split()[0]
        if file_checksum and file_checksum == checksum:
            logger.info('use a cache file - ' + str(url))
            return filename
    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)
# make directory for caching
file.make_dirs(cache_dir)
    #TODO: support proxy
url = os.path.join(baseurl, 'repodata/repomd.xml')
repomd = myurlgrab(url, cache_file, None)
if item not in filepaths:
filepaths[item] = None
continue
filepaths[item] = _get_metadata_from_repo(baseurl,
None,
cachedir,