[TIC-CORE] support caching for analysis data 54/118354/1
authorChulwoo Shin <cw1.shin@samsung.com>
Fri, 10 Mar 2017 05:50:33 +0000 (14:50 +0900)
committerChulwoo Shin <cw1.shin@samsung.com>
Fri, 10 Mar 2017 05:50:33 +0000 (14:50 +0900)
- support caching for analysis data
- fix result data for export command

Change-Id: Ief7114e167bf0f91d3c1bcfadf18367f238ac759
Signed-off-by: Chulwoo Shin <cw1.shin@samsung.com>
tic/command.py
tic/dependency.py
tic/parser/recipe_parser.py
tic/repo.py
tic/server/tic_server.py
tic/utils/file.py
tic/utils/log.py
tic/utils/rpmmisc.py
tools/tic-core

index 92f3c75..661466c 100644 (file)
@@ -21,6 +21,7 @@
 
 import os
 import logging
+import hashlib
 
 from tic.dependency import get_installed_packages
 from tic.parser.recipe_parser import get_default_recipe, convert_recipe_to_yaml
@@ -32,6 +33,7 @@ from tic.repo import get_repodata_from_repos
 from tic.pykickstarter import KSoption, kswriter
 from tic.utils import process
 from tic.utils import misc
+from tic.utils import file
 
 DEFAULT_CACHEDIR='/var/tmp/tic-core'
 DEFAULT_ANALYSIS_CACHEDIR='/var/tmp/tic-core/analysis'
@@ -60,13 +62,27 @@ def analyze(repo_list, recipe_list=None):
     #Download repodata from repositories (Remote/Local)
     repoinfo = get_repodata_from_repos(repos, DEFAULT_CACHEDIR)
     logger.info('time to get repodata from repo: %d ms', misc.get_timestamp() - start_time)
+    
+    checksum_list=[]
+    for repo in repoinfo:
+        checksum_list.append(repo['checksum'])
+    all_checksum = hashlib.sha256('_'.join(checksum_list)).hexdigest()
+    analysis_file=os.path.join(DEFAULT_ANALYSIS_CACHEDIR, all_checksum, 'analysis.json')
+    pkg_group=None
+    if os.path.exists(analysis_file):
+        pkg_group=file.read_json(analysis_file)
 
-    start_time = misc.get_timestamp()
-    # Parse the xml files for the analysis of package (.rpm)
-    repo_parser = RepodataParser('armv7l', repoinfo)
-    pkg_group = repo_parser.parse()
-    logger.info('packages: %d, provides: %d, files: %d', len(pkg_group['pkg_dict']), len(pkg_group['provides']), len(pkg_group['files']))
-    logger.info('time to parse repodata: %d ms', misc.get_timestamp() - start_time)
+    if not pkg_group or not pkg_group.get('pkg_dict'):
+        start_time = misc.get_timestamp()
+        # Parse the xml files for the analysis of package (.rpm)
+        repo_parser = RepodataParser('armv7l', repoinfo)
+        pkg_group = repo_parser.parse()
+        logger.info('packages: %d, provides: %d, files: %d', len(pkg_group['pkg_dict']), len(pkg_group['provides']), len(pkg_group['files']))
+        logger.info('time to parse repodata: %d ms', misc.get_timestamp() - start_time)
+        # dump to cached file
+        file.write_json_flock(analysis_file, pkg_group)
+    else:
+        logger.info('use a cache parsing data - %s', analysis_file)
 
     start_time = misc.get_timestamp()
     # Make a data for TIC (Tizen image creation)
@@ -128,8 +144,8 @@ def exports(export_type, recipe, packages, outdir, filename=None):
     # copy the ks to output directory
     output=copyfile(kspath, outdir, filename)
     logger.info('copy the ks file from %s to dst:%s', kspath, output)
-    
-    return output
+    result = {'kspath':output, 'arch':config.get('Architecture')}
+    return result
 
 def createimage(recipes, ksfile, outdir):
     logger = logging.getLogger(__name__)
index 3219b1c..7617b30 100644 (file)
@@ -98,7 +98,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
     def _select_rpm(capability, require):
         provide_list = []
         # 1. Choose the rpm included in version from provides
-        if require.get('ver') is not None:
+        if require.get('ver'):
             for provide in capability:
                 ver_data = provide['data']
                 # If there is no capability version, use version of package
@@ -138,7 +138,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
         
         # all of capability pkg are in conflict
         if max_ver is None:
-            return pkg_dict.get(provide_list[i]['name'])
+            return pkg_dict.get(provide_list[0]['name'])
             
         return pkg_dict.get(max_ver.get('name'))
     
@@ -236,7 +236,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
         _add_conflicts(pkg_info)
         
         # Installation dependency analysis of rpm
-        for dep_tag in ['requires', 'recommends']:
+        for dep_tag in ['recommends', 'requires']:
             if pkg_info.get(dep_tag):
                 for req in pkg_info.get(dep_tag):
                     choose = None
@@ -345,9 +345,9 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
                 
                # If pkg has a circular dependency and is uncheckable,
                 # circular dep. pkgs can only be visited once
-                if fnode.get('group') and group_visited[fnode['group']][fnode['name']] == 1:
+                gvisite = group_visited.get(fnode.get('group'))
+                if gvisite and gvisite[fnode['name']] == 1:
                     continue
-                
                 _remove_reference(node, fnode)
             node['forward'] = None
             node['group'] = None
index fa41136..6ddd176 100644 (file)
@@ -60,17 +60,17 @@ def get_default_recipe():
         ),
         Configurations=[
             dict(
-                Name='default',
+                Name='tizen-headless',
                 Architecture='armv7l',
                 Schedule= "*",
                 Active= True,
                 Platform= 'Wayland',
                 Part= 'headless',
                 Mic2Options= '-f loop --pack-to=@NAME@.tar.gz',
-                FileName= 'default',
+                FileName= 'tizen-headless-tm1',
                 Repos=['tizen-unified', 'tizen-base'],
                 Groups=[],
-                ExtraPackages= ['building-blocks-root-Headless'],
+                ExtraPackages= [],
                 RemovePackages=[]
             )
         ],
index 82c5de7..b4d499f 100644 (file)
@@ -83,7 +83,8 @@ def get_repodata_from_repos(repos, cachedir):
         repomd_file = os.path.join(temp_dir, 'repomd.xml')
         file.make_dirs(temp_dir);
 
-        #TODO: support proxy
+        #TODO: support local files (local directory)
+        # local/remote repository 
         url = os.path.join(baseurl, 'repodata/repomd.xml')
         repomd = myurlgrab2(url, repomd_file)
 
@@ -91,10 +92,10 @@ def get_repodata_from_repos(repos, cachedir):
             tree = etree.parse(repomd)
             root = tree.getroot()
         except etree.XMLSyntaxError:
-            raise TICError("repomd.xml syntax error.")
+            raise TICError("Unable to parse repomd.xml. Please check the repomd from repository url(%s)", url)
 
         # make cache_dir
-        repo_checksum = hashlib.sha256(open(repomd_file, 'rb').read()).hexdigest();
+        repo_checksum = hashlib.sha256(open(repomd_file, 'rb').read()).hexdigest()
         cache_dir = os.path.join(cachedir, 'cached', base64url, repo_checksum)
         file.make_dirs(cache_dir)
 
index eb5ec12..498b4ca 100644 (file)
@@ -77,6 +77,7 @@ def makeresponse(data, err):
             status = 400 # Bad Request
         elif isinstance(err, Exception):
             status = 500 # Internal Server Error
+            data='Internal Server Error'
         res_body = json.dumps(ResultInfo('false', None, data)._asdict())
     else:
         res_body = json.dumps(ResultInfo('true', data, None)._asdict())
index 050118f..72911eb 100644 (file)
@@ -23,6 +23,7 @@ import errno
 import gzip
 import os
 import shutil
+import json
 
 class FileLockException(Exception):
     pass
@@ -63,6 +64,27 @@ def write(path, data):
     make_dirs(os.path.dirname(path))
     with(open(path, 'w')) as f:
         f.write(data)
+        
+def write_json_flock(path, data):
+    try:
+        make_dirs(os.path.dirname(path))
+        with FileLock(path):
+            with(open(path, 'w')) as f:
+                f.write(json.dumps(data))
+    except OSError as e:
+        if e.errno != errno.EEXIST:
+            print(e)
+
+def read_json(path):
+    ret=None
+    try:
+        with open(path) as f:
+            ret=json.load(f)
+    except ValueError as ve:
+        print(ve)
+    except (OSError, IOError) as e:
+        print(e)
+    return ret
 
 def decompress_gzip(intput_path, output_path):
     with(gzip.open(intput_path, 'rb')) as fobj:
index afae483..d901386 100644 (file)
@@ -34,13 +34,13 @@ def setup(root):
     formatter.datefmt = '%Y-%m-%d %H:%M:%S'
     
     mkdir_p(TIC_LOG_DIR)
-    #fileHandler = logging.handlers.RotatingFileHandler(os.path.join(TIC_LOG_DIR, 'tic-core.log'), maxBytes=LOG_FILE_MAX_BYTES, backupCount=10)
+    fileHandler = logging.handlers.RotatingFileHandler(os.path.join(TIC_LOG_DIR, 'tic-core.log'), maxBytes=LOG_FILE_MAX_BYTES, backupCount=10)
     streamHandler = logging.StreamHandler()
     
-    #fileHandler.setFormatter(formatter)
+    fileHandler.setFormatter(formatter)
     streamHandler.setFormatter(formatter)
     
-    #logger.addHandler(fileHandler)
+    logger.addHandler(fileHandler)
     logger.addHandler(streamHandler)
     
 def mkdir_p(path):
index 299896b..92c5911 100644 (file)
@@ -53,10 +53,10 @@ def compare_req_cap_ver(req, cap):
 
 def meetRequireVersion(req_ver, cmp_ver):
     cmp_ret = compare_req_cap_ver(req_ver, cmp_ver)
-    if cmp_ret == 0 and (req_ver['flags'] == 'EQ' or req_ver['flags'] == 'GE' or req_ver['flags'] == 'LE'):
+    if cmp_ret == 0 and (req_ver['flags'] in ['EQ', 'GE', 'LE']):
         return True
-    elif cmp_ret == 1 and (req_ver['flags'] == 'LT' or req_ver['flags'] == 'LE'):
+    elif cmp_ret == 1 and (req_ver['flags'] in ['LT', 'LE']):
         return True
-    elif cmp_ret == -1 and (req_ver['flags'] == 'GT' or req_ver['flags'] == 'GE'):
+    elif cmp_ret == -1 and (req_ver['flags'] in ['GT', 'GE']):
         return True
     return False
index 176313e..9cda680 100644 (file)
@@ -92,16 +92,16 @@ def main(argv):
             else:
                 logger.info('kickstart or recipes file is required')
         elif args.subparser_name == 'start':
-            tic_server.start(args.port)
+            tic_server.start(int(args.port))
         return 0
     except KeyboardInterrupt:
         ### handle keyboard interrupt ###
         return 0
     except error.TICError as err:
         logger.error(err)
-    except Exception as ex:
-        logger.error(ex)
-        return 2
+    except Exception as ex:
+        logger.error(ex)
+        return 2
     
 if __name__ == "__main__":
     log.setup('tic')