[TIC-CORE] change the license from apache 2.0 to flora 1.1
[archive/20170607/tools/tic-core.git] / tic / command.py
index 65570d1..29d2245 100644 (file)
+#!/usr/bin/python
+# Copyright (c) 2016 Samsung Electronics Co., Ltd
+#
+# Licensed under the Flora License, Version 1.1 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://floralicense.org/license/
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Contributors:
+# - S-Core Co., Ltd
+
+import os
 import logging
+import hashlib
 
-from tic.dependency import analyze_dependency, get_installed_packages
-from tic.parser.recipe_parser import get_default_recipe
+from tic.dependency import get_installed_packages
+from tic.parser.recipe_parser import default_recipe, RecipeParser
 from tic.parser.repo_parser import RepodataParser
 from tic.parser.view_parser import make_view_data
-
 from tic.utils.error import TICError
+from tic.utils.file import copyfile
 from tic.repo import get_repodata_from_repos
+from tic.pykickstarter import KSoption, kswriter
+from tic.utils import process
+from tic.utils import misc
+from tic.utils import file
+from tic.config import configmgr
 
-DEFAULT_CACHEDIR='/var/tmp/tic-core/cached'
+DEFAULT_CACHEDIR=configmgr.setting['tempdir']
+DEFAULT_ANALYSISDIR=os.path.join(DEFAULT_CACHEDIR, 'analysis')
+DEFAULT_KICKSTARTDIR=os.path.join(DEFAULT_CACHEDIR, 'kickstart')
 
-def analyze(repo_list, recipe_list=None):
def analyze(recipe_list):
    """Analyze the packages available from the recipes' repositories.

    Parses (or loads from cache) the repodata of every repository named in
    *recipe_list*, then computes the install-dependency view for TIC
    (Tizen image creation).

    :param recipe_list: list of recipe inputs accepted by RecipeParser;
                        falls back to the default recipe when empty/None
    :returns: dict with 'view' (UI tree), 'data' (raw package maps),
              'recipes' (repository info) and 'defaultpackages'
              (packages selected for installation)
    """
    logger = logging.getLogger(__name__)
    if not recipe_list:
        logger.info('Use default recipe because there is no import data')
        recipe_list = default_recipe.getDefaultParameter()

    recipe_parser = RecipeParser(recipe_list)
    recipe_parser.parse()
    recipe_info = recipe_parser.getMergedRecipe()

    start_time = misc.get_timestamp()
    # Download repodata from repositories (remote/local)
    repoinfo = get_repodata_from_repos(recipe_info.get('Repositories'), DEFAULT_CACHEDIR)
    logger.info('time to get repodata from repo: %d ms', misc.get_timestamp() - start_time)

    # Cache key: one digest over every repository checksum, so the parsed
    # analysis can be reused as long as no repository content changed.
    # encode() is required on Python 3 (hashlib only accepts bytes) and is
    # harmless for the ASCII checksums on Python 2.
    checksum_list = [repo['checksum'] for repo in repoinfo]
    all_checksum = hashlib.sha256('_'.join(checksum_list).encode('utf-8')).hexdigest()
    analysis_file = os.path.join(DEFAULT_ANALYSISDIR, all_checksum, 'analysis.json')

    pkg_group = None
    if os.path.exists(analysis_file):
        pkg_group = file.read_json(analysis_file)

    if not pkg_group or not pkg_group.get('pkg_dict'):
        start_time = misc.get_timestamp()
        # Parse the xml files for the analysis of package (.rpm)
        repo_parser = RepodataParser('armv7l', repoinfo)
        pkg_group = repo_parser.parse()
        logger.info('packages: %d, provides: %d, files: %d', len(pkg_group['pkg_dict']), len(pkg_group['provides']), len(pkg_group['files']))
        logger.info('time to parse repodata: %d ms', misc.get_timestamp() - start_time)
        # dump to cached file (file lock guards concurrent writers)
        file.write_json_flock(analysis_file, pkg_group)
    else:
        logger.info('use a cache parsing data - %s', analysis_file)

    start_time = misc.get_timestamp()
    # Make a data for TIC (Tizen image creation)
    view_data = make_view_data(pkg_group)
    # analyze install-dependency
    inst_packages = get_installed_packages(recipe_info, repoinfo, pkg_group)
    logger.info('installed package: %d', len(inst_packages))
    logger.info('time to analyze dependency: %d ms', misc.get_timestamp() - start_time)

    result = {'view': view_data,
              'data': {'packages': pkg_group.get('pkg_dict'),
                       'provides': pkg_group.get('provides'),
                       'files': pkg_group.get('files'),
                       'groups': pkg_group.get('groups'),
                       'conflicts': pkg_group.get('conflicts')},
              'recipes': recipe_parser.getRepositories(),
              'defaultpackages': inst_packages}
    return result
+
def imports(recipe_list):
    """Parse *recipe_list* and return its repository information.

    Falls back to the default recipe when no recipes were supplied.

    :param recipe_list: list of recipe inputs accepted by RecipeParser
    :returns: dict with a single 'recipes' key
    """
    logger = logging.getLogger(__name__)
    if not recipe_list:
        logger.info('Use default recipe because there is no import data')
        recipe_list = default_recipe.getDefaultParameter()

    parser = RecipeParser(recipe_list)
    parser.parse()
    return {'recipes': parser.getRepositories()}
+
def exports(export_type, recipes, packages, outdir, filename=None):
    """Export the selected packages as a recipe or a kickstart (.ks) file.

    :param export_type: 'recipe' or 'ks'; defaults to 'ks' when falsy
    :param recipes: recipe inputs accepted by RecipeParser (required)
    :param packages: non-empty list of package names to install (required)
    :param outdir: directory the exported file is written to
    :param filename: optional output file name
    :returns: dict with the exported 'path' (and 'arch' for ks exports)
    :raises TICError: when recipes/packages are missing or ks creation fails
    """
    logger = logging.getLogger(__name__)
    # TODO validation should be checked before request
    if not export_type:
        export_type = 'ks'
        logger.info('set default export format(.ks)')

    if not recipes:
        raise TICError(configmgr.message['recipes_not_define'])
    # isinstance (not type() comparison) is the idiomatic type check
    if not packages or not isinstance(packages, list):
        raise TICError(configmgr.message['no_package_to_install'])

    recipe_parser = RecipeParser(recipes)
    recipe_parser.parse()
    result = None
    if export_type == 'recipe':
        recipe_path = recipe_parser.export2Recipe(packages, outdir, filename)
        logger.info('export the recipe to %s', recipe_path)
        result = {'path': recipe_path}
    elif export_type == 'ks':
        # 1. create yaml files
        yaml_info = recipe_parser.export2Yaml(packages, DEFAULT_KICKSTARTDIR)
        # 2. create kickstart(.ks) using kickstarter tool
        options = KSoption(yaml_info.configs, yaml_info.repos, yaml_info.cachedir)
        kswriter(options)
        # check whether the ks exists
        recipe_info = recipe_parser.getMergedRecipe()
        baseline = recipe_info['Recipe'].get('Baseline')
        ksname = ''.join([recipe_info['Recipe'].get('FileName'), '.ks'])
        kspath = os.path.join(yaml_info.cachedir, baseline, ksname)
        logger.info('the kickstarter created the ks file (%s)', kspath)
        if not os.path.exists(kspath):
            raise TICError('No ks file was created from kickstarter')
        # copy the ks to output directory
        output = copyfile(kspath, outdir, filename)
        logger.info('export the ks to %s', output)
        result = {'path': output, 'arch': recipe_info['Recipe'].get('Architecture')}
    return result
+
def createimage(recipes, ksfile, outdir):
    """Build a Tizen image from *ksfile* by invoking the 'mic' tool.

    :param recipes: not yet supported; when given, the call is a no-op
    :param ksfile: path to an existing kickstart (.ks) file
    :param outdir: optional output directory passed to mic
    :raises TICError: when *ksfile* does not exist or is a directory
    """
    logger = logging.getLogger(__name__)

    # Guard: recipe-driven image creation is not implemented yet.
    if recipes:
        logger.info('the recipes option is not yet supported')
        return

    # The ks path must point at an existing regular entry, not a directory.
    if not os.path.exists(ksfile) or os.path.isdir(ksfile):
        raise TICError('kickstart file does not exist')

    command = ['mic', 'cr', 'auto', ksfile]
    if outdir:
        command.append('--outdir=%s' % outdir)

    process.run(command, 2)