[TIC-CORE] fix partition for default recipe
[archive/20170607/tools/tic-core.git] tic/repo.py
index 5a03cd2..82c5de7 100644
 # Contributors:
 # - S-Core Co., Ltd
 
+import logging
 import os
 import base64
-import collections
 import hashlib
+import collections
 from lxml import etree
 from tic.utils import file
 from tic.utils import process
 from tic.utils.error import TICError
-from tic.utils.grabber import myurlgrab
-
+from tic.utils.grabber import myurlgrab2
+from tic.utils import misc
 
-def _get_uncompressed_data_from_url(url, filename, proxies):
+def _get_uncompressed_data_from_url(url, filename, proxies=None):
     # download file
-    filename = myurlgrab(url, filename, proxies)
+    filename = myurlgrab2(url, filename)
     # Check if file compressed or not
     if filename.endswith(".gz"):
         decompress_filename = os.path.splitext(filename)[0]
@@ -40,18 +41,17 @@ def _get_uncompressed_data_from_url(url, filename, proxies):
     elif filename.endswith(".bz2"):
         process.run(['bunzip2', "-f", filename])
         filename = os.path.splitext(filename)[0]
-        
     return filename
 
-def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
+def _get_metadata_from_repo(baseurl, proxies, tempdir, cachedir, reponame, filehref,
                             sumtype=None, checksum=None):
-    url = os.path.join(baseurl, filename)
-    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
+    logger = logging.getLogger(__name__)
+    url = os.path.join(baseurl, filehref)
+    filename_tmp = str("%s/%s" % (cachedir, os.path.basename(filehref)))
     if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
         filename = os.path.splitext(filename_tmp)[0]
     else:
         filename = filename_tmp
-        
     if sumtype and checksum and os.path.exists(filename):
         if sumtype == 'sha256':
             file_checksum = hashlib.sha256(open(filename, 'rb').read()).hexdigest()
@@ -61,34 +61,43 @@ def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
             sumcmd = "%ssum" % sumtype
             result = process.run([sumcmd, filename])[1].strip()
             file_checksum = result.split()[0]
-
+        # use cached file
         if file_checksum and file_checksum == checksum:
+            logger.info('using cached file - ' + str(filename))
             return filename
 
-    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)
-
+    temp_file = os.path.join(tempdir, os.path.basename(filehref))
+    file_path = _get_uncompressed_data_from_url(url, temp_file, proxies)
+    return file.copyfile_flock(file_path, filename)
 
 def get_repodata_from_repos(repos, cachedir):
     my_repodata = []
+    temp_path = os.path.join(cachedir, 'temp', str(misc.get_timestamp()))
     for repo in repos:
-        reponame = repo.name
-        baseurl = repo.baseurl
-        cache_dir = os.path.join(cachedir, reponame)
-        cache_file = os.path.join(cache_dir, 'repomd.xml')
-        
-        # make directory for caching
-        file.make_dirs(cache_dir)
-        #TODO: support proxy 
-        
+        reponame = repo.get('name')
+        baseurl = repo.get('url')
+
+        # make temp_dir
+        base64url = base64.urlsafe_b64encode(baseurl)
+        temp_dir = os.path.join(temp_path, base64url)
+        repomd_file = os.path.join(temp_dir, 'repomd.xml')
+        file.make_dirs(temp_dir)
+
+        #TODO: support proxy
         url = os.path.join(baseurl, 'repodata/repomd.xml')
-        repomd = myurlgrab(url, cache_file, None)
-        
+        repomd = myurlgrab2(url, repomd_file)
+
         try:
             tree = etree.parse(repomd)
             root = tree.getroot()
         except etree.XMLSyntaxError:
             raise TICError("repomd.xml syntax error.")
 
+        # make cache_dir
+        repo_checksum = hashlib.sha256(open(repomd_file, 'rb').read()).hexdigest()
+        cache_dir = os.path.join(cachedir, 'cached', base64url, repo_checksum)
+        file.make_dirs(cache_dir)
+
         ns = root.tag
         ns = ns[0:ns.rindex("}")+1]
 
@@ -114,40 +123,26 @@ def get_repodata_from_repos(repos, cachedir):
             if item not in filepaths:
                 filepaths[item] = None
                 continue
-            
             filepaths[item] = _get_metadata_from_repo(baseurl,
                                                       None,
-                                                      cachedir,
+                                                      temp_dir,
+                                                      cache_dir,
                                                       reponame,
                                                       filepaths[item],
                                                       sumtypes[item],
                                                       checksums[item])
         my_repodata.append({"name":reponame,
                             "baseurl":baseurl,
+                            "checksum":repo_checksum,
                             "repomd":repomd,
                             "primary":filepaths['primary'],
-                            "cachedir":cachedir,
+                            "cachedir":cache_dir,
                             "proxies":None,
                             "patterns":filepaths['patterns'],
                             "comps":filepaths['comps']})
-        
     return my_repodata
 
 
-RepoType = collections.namedtuple('Repo', 'name, baseurl')
+RepoType = collections.namedtuple('Repo', 'name, url')
 def Repo(name, baseurl):
-    return RepoType(name, baseurl)
-
-if __name__ == '__main__':
-    repo_url_1 = 'https://download.tizen.org/snapshots/tizen/base/latest/repos/arm64/packagesaaa'
-    repo_url_2 = 'https://download.tizen.org/snapshots/tizen/mobile/latest/repos/arm64-wayland/packages'
-    repos = []
-    repos.append(Repo(base64.urlsafe_b64encode(repo_url_1), repo_url_1))
-    repos.append(Repo(base64.urlsafe_b64encode(repo_url_2), repo_url_2))
-    cachedir = '/var/tmp/tic-core/cached'
-    repodata = get_repodata_from_repos(repos, cachedir)
-    print(repodata)
-    
-    
-    
+    return RepoType(name, baseurl)
\ No newline at end of file
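
Usage note (not part of the commit): with the __main__ demo above removed, this is a
minimal sketch of how the refactored get_repodata_from_repos is expected to be called.
It assumes repos are now passed as plain dicts with 'name' and 'url' keys, as implied
by repo.get('name') / repo.get('url'); the URL and cache path below are placeholders.

    from tic import repo

    repos = [
        {'name': 'tizen_base_arm64',
         'url': 'https://download.tizen.org/snapshots/tizen/base/latest/repos/arm64/packages'},
    ]
    # Downloads land in <cachedir>/temp/<timestamp>/<base64(url)>/, and verified
    # metadata is kept in <cachedir>/cached/<base64(url)>/<sha256(repomd.xml)>/.
    cachedir = '/var/tmp/tic-core'
    repodata = repo.get_repodata_from_repos(repos, cachedir)
    for data in repodata:
        print('%s (%s): %s' % (data['name'], data['checksum'], data['primary']))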