[TIC-CORE] supports new recipe 53/123253/1
authorChulwoo Shin <cw1.shin@samsung.com>
Wed, 5 Apr 2017 05:28:11 +0000 (14:28 +0900)
committerChulwoo Shin <cw1.shin@samsung.com>
Wed, 5 Apr 2017 05:28:11 +0000 (14:28 +0900)
- supports the new recipe format
- imports/exports a recipe or kickstart file.

Change-Id: I20a7afbf6451da736034523d55121d33cd447144
Signed-off-by: Chulwoo Shin <cw1.shin@samsung.com>
tic/command.py
tic/config.py
tic/dependency.py
tic/parser/recipe_parser.py
tic/parser/repo_parser.py
tic/parser/view_parser.py
tic/server/tic_server.py
tic/utils/log.py

index 40a156b..9388869 100644 (file)
@@ -24,7 +24,7 @@ import logging
 import hashlib
 
 from tic.dependency import get_installed_packages
-from tic.parser.recipe_parser import get_default_recipe, convert_recipe_to_yaml
+from tic.parser.recipe_parser import default_recipe, RecipeParser
 from tic.parser.repo_parser import RepodataParser
 from tic.parser.view_parser import make_view_data
 from tic.utils.error import TICError
@@ -40,28 +40,19 @@ DEFAULT_CACHEDIR=configmgr.setting['tempdir']
 DEFAULT_ANALYSISDIR=os.path.join(DEFAULT_CACHEDIR, 'analysis')
 DEFAULT_KICKSTARTDIR=os.path.join(DEFAULT_CACHEDIR, 'kickstart')
 
-def analyze(repo_list, recipe_list=None):
+def analyze(recipe_list):
     logger = logging.getLogger(__name__)
-    if not repo_list and not recipe_list:
-        raise TICError('No repositories defined')
-    repos = []
-    recipe = None
-    #TODO Repository check
-    # using default recipe (Temporary Code)
-    if recipe_list and recipe_list[0] == 'default':
-        recipe = get_default_recipe()
-        for repo_url in recipe.get('Repositories'):
-            repos.append({'name': repo_url.get('Name'),
-                          'url': repo_url.get('Url')})
-    else:
-        number=1
-        for repo_url in repo_list:
-            repos.append({'name': 'repository_%d' % number,
-                          'url': repo_url})
-            number = number + 1
+    if not recipe_list:
+        logger.info('Use default recipe because there is no import data')
+        recipe_list = default_recipe.getDefaultParameter()
+    
+    recipe_parser = RecipeParser(recipe_list)
+    recipe_parser.parse()
+    recipe_info = recipe_parser.getMergedRecipe()
+    
     start_time = misc.get_timestamp()
     #Download repodata from repositories (Remote/Local)
-    repoinfo = get_repodata_from_repos(repos, DEFAULT_CACHEDIR)
+    repoinfo = get_repodata_from_repos(recipe_info.get('Repositories'), DEFAULT_CACHEDIR)
     logger.info('time to get repodata from repo: %d ms', misc.get_timestamp() - start_time)
     
     checksum_list=[]
@@ -89,7 +80,7 @@ def analyze(repo_list, recipe_list=None):
     # Make a data for TIC (Tizen image creation)
     view_data = make_view_data(pkg_group)
     # analyze install-dependency
-    inst_packages = get_installed_packages(recipe, repoinfo, pkg_group)
+    inst_packages = get_installed_packages(recipe_info, repoinfo, pkg_group)
     logger.info('installed package: %d', len(inst_packages))
     logger.info('time to analyze dependency: %d ms', misc.get_timestamp() - start_time)
 
@@ -99,53 +90,59 @@ def analyze(repo_list, recipe_list=None):
                        'files': pkg_group.get('files'),
                        'groups': pkg_group.get('groups'),
                        'conflicts': pkg_group.get('conflicts')},
-              'repos': repos,
+              'recipes': recipe_parser.getRepositories(),
               'defaultpackages': inst_packages}
     return result
 
-def exports(export_type, recipe, packages, outdir, filename=None):
+def imports(recipe_list):
+    logger = logging.getLogger(__name__)
+    if not recipe_list:
+        logger.info('Use default recipe because there is no import data')
+        recipe_list = default_recipe.getDefaultParameter()
+    
+    recipe_parser = RecipeParser(recipe_list)
+    recipe_parser.parse()
+    result = {'recipes': recipe_parser.getRepositories()}
+    #result = {'imports': recipe_parser.getMergedRepositories()}
+    return result
+
+def exports(export_type, recipes, packages, outdir, filename=None):
     logger = logging.getLogger(__name__)
     #TODO validation should be checked before request
     if not export_type:
         export_type='ks'
         logger.info('set default export format(.ks)')
 
-    if not recipe:
-        raise TICError('No recipe defined')
+    if not recipes:
+        raise TICError(configmgr.message['recipes_not_define'])
     if not packages or type(packages) is not list:
-        raise TICError('No packages defined')
-
-    #TODO recipe parsing
-    # Temporary code for 1st prototype release
-    if recipe.get('name') == 'default':
-        recipe = get_default_recipe()
-        config = recipe.get('Configurations')[0]
-        for key in ['Default', config['Platform']]:
-            recipe[key]['Groups']=[]
-            recipe[key]['ExtraPackages']=[]
-        config['Groups']=[]
-        config['ExtraPackages'] = packages
-    else:
-        raise TICError('No recipes defined')
-    
-    # create the yaml
-    yaml_info = convert_recipe_to_yaml(recipe, DEFAULT_KICKSTARTDIR)
-    
-    # create kickstart(.ks) using kickstarter tool
-    options = KSoption(yaml_info.configs, yaml_info.repos, yaml_info.cachedir)
-    kswriter(options)
-    
-    # check whether the ks exists
-    baseline=recipe['Default'].get('Baseline')
-    ksname= ''.join([config.get('FileName'), '.ks'])
-    kspath=os.path.join(yaml_info.cachedir, baseline, ksname)
-    if not os.path.exists(kspath):
-        raise TICError('No ks file was created from kickstarter')
+        raise TICError(configmgr.message['no_package_to_install'])
     
-    # copy the ks to output directory
-    output=copyfile(kspath, outdir, filename)
-    logger.info('copy the ks file from %s to dst:%s', kspath, output)
-    result = {'kspath':output, 'arch':config.get('Architecture')}
+    recipe_parser = RecipeParser(recipes)
+    recipe_parser.parse()
+    result = None
+    if export_type == 'recipe':
+        recipe_path = recipe_parser.export2Recipe(packages, outdir, filename)
+        logger.info('export the recipe to %s' % recipe_path)
+        result = {'path': recipe_path}
+    elif export_type == 'ks':
+        # 1. create yaml files
+        yaml_info = recipe_parser.export2Yaml(packages, DEFAULT_KICKSTARTDIR)
+        # 2. create kickstart(.ks) using kickstarter tool
+        options = KSoption(yaml_info.configs, yaml_info.repos, yaml_info.cachedir)
+        kswriter(options)
+        # check whether the ks exists
+        recipe_info = recipe_parser.getMergedRecipe()
+        baseline=recipe_info['Recipe'].get('Baseline')
+        ksname= ''.join([recipe_info['Recipe'].get('FileName'), '.ks'])
+        kspath=os.path.join(yaml_info.cachedir, baseline, ksname)
+        logger.info('the kickstarter created the ks file (%s)' % kspath)
+        if not os.path.exists(kspath):
+            raise TICError('No ks file was created from kickstarter')
+        # copy the ks to output directory
+        output=copyfile(kspath, outdir, filename)
+        logger.info('export the ks to %s' % output)
+        result = {'path':output, 'arch':recipe_info['Recipe'].get('Architecture')}
     return result
 
 def createimage(recipes, ksfile, outdir):
index 0c6eb09..c83d1c8 100644 (file)
@@ -6,30 +6,37 @@ DEFAULT_MSG_CONF = "/etc/tic-core/message.conf"
 DEFAULT_CONF = "/etc/tic-core/config.conf"
 
 class ConfigMgr(object):
-    DEFAULT_MESSAGE = {'message': {
-                        'repo_not_found': "The repository url cannot be found (%s)",
-                        'xml_parse_error': "There was a problem parsing the %s, please check the file (%s)",
-                        'yaml_parse_error': "There was a problem parsing the %s, please check the file (%s)",
-                        'recipe_parse_error': "There was a problem parsing the recipe, please check the recipe file (%s)",
-                        'package_not_exist': "The default package(%s) does not exist.",
-                        'dependency_not_exist': "The %s needed by %s does not exist. should be checked for repository",
-                        'server_error': "there was a problem servicing your request. please try again later" }
+    DEFAULT_MESSAGE = {"message": {
+                        "repo_not_found": "The repository URL cannot be found (%s)",
+                        "recipe_not_found": "The recipe URL cannot be found (%s)",
+                        "xml_parse_error": "There was a problem parsing the %s, please check the file (%s)",
+                        "yaml_parse_error": "There was a problem parsing the %s, please check the file (%s)",
+                        "recipe_parse_error": "There was a problem parsing the recipe, please check the recipe file (%s)",
+                        "recipe_convert_error": "There was a problem converting this recipe, please check the recipes",
+                        "package_not_exist": "The default package(%s) does not exist.",
+                        "dependency_not_exist": "The %s needed by %s does not exist. should be checked for repository",
+                        "server_error": "there was a problem servicing your request. please try again later",
+                        "recipe_repo_not_exist": "%s repository does not exist in the recipe",
+                        "default_recipe_use": "Use default recipe because there is no import data",
+                        "no_package_to_install": "No packages to install. Please select a package to install",
+                        "recipes_not_define": "Please define recipe for %s file creation"}
                        }
     
-    DEFAULT_TIC = {'setting': {
-                       'tempdir': '/var/tmp/tic-core',
-                       'cachedir': '/var/tmp/tic-core/cache',
-                       'logdir': '/var/tmp/tic-core/log'},
-                   'server': {
-                       'port': 8082},
-                   'regularexp': {
-                       'meta_prefix': "building-blocks",
-                       'meta_prefix_root': "building-blocks-root-",
-                       'meta_prefix_sub1': "building-blocks-sub1-",
-                       'meta_pattern': "-(?P<meta>root|sub1|sub2)-(?P<pkgname>.+)",    
-                       'meta_sub1_pattern': "(?P<root>.+)-(?P<sub1>.+)",
-                       'meta_sub2_pattern': "(?P<root>.+)-(?P<sub1>.+)-(?P<sub2>.+)",
-                       'profile_pattern': "(?P<pkgname>.+)-profile_(?P<profile>[^-]+)-?(?P<extra>.+)?"}
+    DEFAULT_TIC = {"setting": {
+                       "tempdir": "/var/tmp/tic-core",
+                       "cachedir": "/var/tmp/tic-core/cache",
+                       "logdir": "/var/tmp/tic-core/log",
+                       "default_recipe": "/etc/tic-core/recipe.yaml"},
+                   "server": {
+                       "port": 8082},
+                   "regularexp": {
+                       "meta_prefix": "building-blocks",
+                       "meta_prefix_root": "building-blocks-root-",
+                       "meta_prefix_sub1": "building-blocks-sub1-",
+                       "meta_pattern": "-(?P<meta>root|sub1|sub2)-(?P<pkgname>.+)",    
+                       "meta_sub1_pattern": "(?P<root>.+)-(?P<sub1>.+)",
+                       "meta_sub2_pattern": "(?P<root>.+)-(?P<sub1>.+)-(?P<sub2>.+)",
+                       "profile_pattern": "(?P<pkgname>.+)-profile_(?P<profile>[^-]+)-?(?P<extra>.+)?"}
                    }
     
     _instance = None
@@ -39,7 +46,7 @@ class ConfigMgr(object):
             cls._instance = super(ConfigMgr, cls).__new__(cls, *args, **kwargs)
         return cls._instance
     
-    def __init__(self):
+    def __init__(self):
         self._reset()
         for conf_path in [DEFAULT_CONF, DEFAULT_MSG_CONF]:
             self._setConfig(conf_path)
index 78b8232..c3d9de7 100644 (file)
@@ -303,7 +303,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
                         if not choose or _check_conflicts(choose):
                             logger.info('%s recommended by %s is ignored for selection (Conflict)' % (req['name'], pkg_info['name']))
                             continue
-                        
+
                     if choose:
                         if selected[choose['id']] == 0:
                             dep_set = _analyze_dep(choose)
@@ -412,21 +412,14 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
     if not recipe or not repoinfo:
         return []
     
-    default = recipe.get('Default')
-    config = recipe.get('Configurations')[0]
-    platform_name = config.get('Platform')
-    platform = recipe.get(platform_name)
-    
-    # check groups/extraPackages
     group_set = set([])
     pkg_set = set([])
-    for g in [default, platform, config]:
-        if g.has_key('Groups'):
-            group_set.update(g.get('Groups'))
-        if g.has_key('ExtraPackages'):
-            pkg_set.update(g.get('ExtraPackages'))
-    #group_dict = dict.fromkeys(group_set)
     
+    if recipe['Recipe'].get('Groups'):
+        group_set.update(recipe['Recipe'].get('Groups'))
+    if recipe['Recipe'].get('ExtraPackages'):
+        pkg_set.update(recipe['Recipe'].get('ExtraPackages'))
+
     # parsing group.xml
     if group_set:
         for repo in repoinfo:
index a830cb3..9c5c751 100644 (file)
 # Contributors:
 # - S-Core Co., Ltd
 
+import copy
+import os
+import yaml
+import urllib2
+import contextlib
 import collections
-from datetime import datetime
 import logging
-import os
-from tic.utils import error
+from datetime import datetime
+from tic.utils.error import TICError
 from tic.utils.file import write, make_dirs
-import yaml
 from tic.config import configmgr
 
+DUMMY_PLATFORM = 'DummyPlatform'
+DEFAULT_RECIPE_NAME = 'default_recipe'
+DEFAULT_RECIPE_PATH = configmgr.setting['default_recipe']
+RECIPE_EXTEND_FIELD = {'Repos', 'Groups', 'Repositories', 'Partitions', 'ExtraPackages', 'RemovePackages', 'PostScripts', 'NoChrootScripts'}
 
-def get_default_recipe():
-    recipe = dict(
-        Default=dict(
-            Baseline= 'tizen-3.0',
-            Active= True,
-            Mic2Options= '-f raw --fstab=uuid --copy-kernel --compress-disk-image=bz2 --generate-bmap',
-            Part='headless',
-            Language= 'en_US.UTF-8',
-            Keyboard= 'us',
-            Timezone= 'Asia/Seoul',
-            RootPass= 'tizen',
-            DefaultUser= 'guest',
-            DefaultUserPass= 'tizen',
-            BootLoader= True,
-            BootloaderAppend= "rw vga=current splash rootwait rootfstype=ext4 plymouth.enable=0",
-            BootloaderTimeout= 3,
-            BootloaderOptions= '--ptable=gpt --menus="install:Wipe and Install:systemd.unit=system-installer.service:test"',
-            StartX= False,
-            Desktop= 'None',
-            SaveRepos= False,
-            UserGroups= "audio,video"
-        ),
-        Wayland=dict(
-            Part='headless',
-            UserGroups='audio,video',
-            Groups=[],
-            PostScripts=[],
-            Repos= [],
-            NoChrootScripts=[]
-        ),
-        Configurations=[
-            dict(
-                Name='tizen-headless',
-                Architecture='armv7l',
-                Schedule= "*",
-                Active= True,
-                Platform= 'Wayland',
-                Part= 'headless',
-                Mic2Options= '-f loop --pack-to=@NAME@.tar.gz',
-                FileName= 'tizen-headless-tm1',
-                Repos=['tizen-unified', 'tizen-base'],
-                Groups=[],
-                ExtraPackages= [],
-                RemovePackages=[]
-            )
-        ],
-        Repositories=[
-            dict(Name='tizen-unified',
-                 Url='http://download.tizen.org/live/devel:/Tizen:/Unified/standard/',
-                 #Url='file://home/shinchulwoo/Repo/Unified',
-                 Options='--ssl_verify=no'),
-            dict(Name='tizen-base',
-                 Url='http://download.tizen.org/snapshots/tizen/base/latest/repos/arm/packages/',
-                 #Url='file://home/shinchulwoo/Repo/Base',
-                 Options='--ssl_verify=no')
-        ],
-        Partitions=[
-            dict(Name='headless',
-                 Contents='part / --size=2000 --ondisk mmcblk0p --fstype=ext4 --label=rootfs --extoptions=\"-J size=16\" \n\
-part /opt/ --size=1000 --ondisk mmcblk0p --fstype=ext4 --label=system-data --extoptions="-m 0" \n\
-part /boot/kernel/mod_tizen_tm1/lib/modules --size=12 --ondisk mmcblk0p --fstype=ext4 --label=modules \n')
-        ]
-    )
-    return recipe
+class DefaultRecipe(object):
+    DEFAULT_RECIPE = {'NoChrootScripts': [{'Contents': 'if [ -n "$IMG_NAME" ]; then\n    echo "BUILD_ID=$IMG_NAME" >> $INSTALL_ROOT/etc/tizen-release\n    echo "BUILD_ID=$IMG_NAME" >> $INSTALL_ROOT/etc/os-release\nfi\n',
+                                           'Name': 'buildname'}],
+          'Partitions': [{'Contents': 'part / --size=2000 --ondisk mmcblk0p --fstype=ext4 --label=rootfs --extoptions="-J size=16"\npart /opt/ --size=1000 --ondisk mmcblk0p --fstype=ext4 --label=system-data --extoptions="-m 0"\npart /boot/kernel/mod_tizen_tm1/lib/modules --size=12 --ondisk mmcblk0p --fstype=ext4 --label=modules\n',
+                          'Name': 'default-part'}],
+          'PostScripts': [{'Contents': '#!/bin/sh\necho "#################### generic-base.post ####################"\n\ntest ! -e /opt/var && mkdir -p /opt/var\ntest -d /var && cp -arf /var/* /opt/var/\nrm -rf /var\nln -snf opt/var /var\n\ntest ! -e /opt/usr/home && mkdir -p /opt/usr/home\ntest -d /home && cp -arf /home/* /opt/usr/home/\nrm -rf /home\nln -snf opt/usr/home /home\n\nbuild_ts=$(date -u +%s)\nbuild_date=$(date -u --date @$build_ts +%Y%m%d_%H%M%S)\nbuild_time=$(date -u --date @$build_ts +%H:%M:%S)\n\nsed -ri \\\n\t-e \'s|@BUILD_ID[@]|@BUILD_ID@|g\' \\\n\t-e "s|@BUILD_DATE[@]|$build_date|g" \\\n\t-e "s|@BUILD_TIME[@]|$build_time|g" \\\n\t-e "s|@BUILD_TS[@]|$build_ts|g" \\\n\t/etc/tizen-build.conf\n\n# setup systemd default target for user session\ncat <<\'EOF\' >>/usr/lib/systemd/user/default.target\n[Unit]\nDescription=User session default target\nEOF\nmkdir -p /usr/lib/systemd/user/default.target.wants\n\n# sdx: fix smack labels on /var/log\nchsmack -a \'*\' /var/log\n\n# create appfw dirs inside homes\nfunction generic_base_user_exists() {\n        user=$1\n        getent passwd | grep -q ^${user}:\n}\n\nfunction generic_base_user_home() {\n        user=$1\n        getent passwd | grep ^${user}: | cut -f6 -d\':\'\n}\n\nfunction generic_base_fix_user_homedir() {\n        user=$1\n        generic_base_user_exists $user || return 1\n\nhomedir=$(generic_base_user_home $user)\n        mkdir -p $homedir/apps_rw\n        for appdir in desktop manifest dbspace; do\n                mkdir -p $homedir/.applications/$appdir\n        done\n        find $homedir -type d -exec chsmack -a User {} \\;\n        chown -R $user:users $homedir\n        return 0\n}\n\n# fix TC-320 for SDK\n. /etc/tizen-build.conf\n[ "${TZ_BUILD_WITH_EMULATOR}" == "1" ] && generic_base_fix_user_homedir developer\n\n# Add info.ini for system-info CAPI (TC-2047)\n/etc/make_info_file.sh',
+                           'Name': 'generic-base'}],
+          'Recipe': {'Active': True,
+                     'Architecture': 'armv7l',
+                     'Baseline': 'tizen',
+                     'BootLoader': True,
+                     'BootloaderAppend': 'rw vga=current splash rootwait rootfstype=ext4 plymouth.enable=0',
+                     'BootloaderOptions': '--ptable=gpt --menus="install:Wipe and Install:systemd.unit=system-installer.service:test"',
+                     'BootloaderTimeout': 3,
+                     'DefaultUser': 'guest',
+                     'DefaultUserPass': 'tizen',
+                     'Desktop': 'None',
+                     'ExtraPackages': [],
+                     'FileName': 'default-armv7l',
+                     'Groups': [],
+                     'Keyboard': 'us',
+                     'Language': 'en_US.UTF-8',
+                     'Mic2Options': '-f raw --fstab=uuid --copy-kernel --compress-disk-image=bz2 --generate-bmap',
+                     'Name': 'default-recipe',
+                     'NoChrootScripts': ['buildname'],
+                     'Part': 'default-part',
+                     'PostScripts': ['generic-base'],
+                     'RemovePackages': [],
+                     'Repos': ['tizen_unified', 'tizen_base_armv7l'],
+                     'RootPass': 'tizen',
+                     'SaveRepos': False,
+                     'Schedule': '*',
+                     'StartX': False,
+                     'Timezone': 'Asia/Seoul',
+                     'UserGroups': 'audio,video'},
+          'Repositories': [{'Name': 'tizen_unified',
+                            'Options': '--ssl_verify=no',
+                            'Url': 'http://download.tizen.org/snapshots/tizen/unified/latest/repos/standard/packages/'},
+                            #'Url': 'http://download.tizen.org/live/devel:/Tizen:/Unified/standard/'},
+                           {'Name': 'tizen_base_armv7l',
+                            'Options': '--ssl_verify=no',
+                            'Url': 'http://download.tizen.org/snapshots/tizen/base/latest/repos/arm/packages/'}]}
+    _instance = None
+    def __new__(cls, *args, **kwargs):
+        if not cls._instance:
+            cls._instance = super(DefaultRecipe, cls).__new__(cls, *args, **kwargs)
+        return cls._instance
+    def __init__(self):
+        logger = logging.getLogger(__name__)
+        if os.path.exists(DEFAULT_RECIPE_PATH):
+            try:
+                with file(DEFAULT_RECIPE_PATH) as f:
+                    self.DEFAULT_RECIPE = yaml.load(f)
+                    logger.info('Read default recipe from %s' % DEFAULT_RECIPE_PATH)
+            except IOError as err:
+                logger.info(err)
+            except yaml.YAMLError as err:
+                logger.info(err)
+    def getDefaultRecipe(self):
+        return copy.deepcopy(self.DEFAULT_RECIPE)
+    def getSystemConfig(self):
+        data = copy.deepcopy(self.DEFAULT_RECIPE)
+        for field in RECIPE_EXTEND_FIELD:
+            if field == 'Partitions':
+                continue
+            if data['Recipe'].get(field):
+                data['Recipe'][field] = []
+            if data.get(field):
+                data[field] = []
+        return data
+    def getDefaultParameter(self):
+        return [dict(url=DEFAULT_RECIPE_NAME, type='recipe')]
 
-def load_yaml(path):
-    logger = logging.getLogger(__name__)
-    try:
-        with file(path) as f:
-            return yaml.load(f)
-    except IOError as err:
-        logger(err)
-        raise error.TICError(configmgr.message['server_error'])
-    except yaml.YAMLError as err:
-        logger(err)
-        raise error.TICError(configmgr.message['recipe_parse_error'] % os.path.basename(path))
+default_recipe = DefaultRecipe()
+
+class RecipeParser(object):
+    def __init__(self, inputs):
+        # in order to priority definition
+        self.inputs = []
+        self.recipes = {}
+        self._repositories = None
+        self._recipe = None
+        # add recipe input
+        self.addRecipes(inputs)
+    
+    def parse(self):
+        logger = logging.getLogger(__name__)
+        if not self.inputs:
+            return
+        self._repositories = None
+        self._recipe = None
+        repo_count = 1
+        try:
+            for data in self.inputs:
+                data_type = data.get('type')
+                # type: recipe or repository
+                if data_type == 'recipe':
+                    # default recipe
+                    if data.get('url') == DEFAULT_RECIPE_NAME:
+                        self.recipes[data.get('url')] = default_recipe.getDefaultRecipe()
+                    else:
+                        with contextlib.closing(urllib2.urlopen(data.get('url'))) as op:
+                            self.recipes[data.get('url')] = yaml.load(op.read())
+                elif data_type == 'repository':
+                    data['name'] = 'repository_%s' % repo_count
+                    repo_count += 1
+        except urllib2.HTTPError as err:
+            if err.code == 404:
+                msg = configmgr.message['recipe_not_found'] % data.get('url')
+            else:
+                msg = str(err)
+            logger.error(err)
+            raise TICError(msg)
+        except urllib2.URLError as err:
+            logger.error(err)
+            raise TICError(configmgr.message['server_error'])
+        except yaml.YAMLError as err:
+            logger.error(err)
+            raise TICError(configmgr.message['recipe_parse_error'] % data.get('url'))
     
+    def addRecipes(self, inputs):
+        if inputs: 
+            if isinstance(inputs, list):
+                for data in inputs:
+                    self.inputs.append(data)
+            else:
+                self.inputs.append(inputs)
 
-def convert_recipe_to_yaml(recipe, filepath):
-    logger = logging.getLogger(__name__)
+    def getRepositories(self):
+        if not self._repositories:
+            self._repositories = self._getAllRepositories()
+        return self._repositories
+
+    def _getAllRepositories(self):
+        repos = []
+        name_count = 1
+        for data in self.inputs:
+            if data.get('type') == 'recipe':
+                recipe_repos = []
+                recipe_info = self.recipes[data['url']]
+                recipe_name = None
+                if recipe_info.get('Recipe'):
+                    if recipe_info['Recipe'].get('Name'):
+                        recipe_name = recipe_info['Recipe'].get('Name')
+                    if recipe_info['Recipe'].get('Repos'):
+                        for repo_name in recipe_info['Recipe'].get('Repos'):
+                            isExist = False
+                            if recipe_info.get('Repositories'):
+                                for repo_info in recipe_info.get('Repositories'):
+                                    if repo_info.get('Name') == repo_name:
+                                        recipe_repos.append(dict(name=repo_name,
+                                                                 url=repo_info.get('Url'),
+                                                                 options=repo_info.get('Options')))
+                                        isExist = True
+                                        break
+                            # repository does not exist
+                            if not isExist:
+                                raise TICError(configmgr.message['recipe_repo_not_exist'] % repo_name)
+                if not recipe_name:
+                    recipe_name = 'recipe_%s' % name_count
+                    name_count += 1
+                repos.append(dict(name=recipe_name,
+                                  url=data['url'],
+                                  repos=recipe_repos,
+                                  type='recipe'))
+            else:
+                repos.append(data)
+        return repos
     
-    # config.yaml
-    config = dict(Default=None, Configurations=[])
-    config['Default'] = recipe.get('Default')
-    # targets (only one target)
-    config['Configurations'].append(recipe.get('Configurations')[0])
-    platform_name = config['Configurations'][0].get('Platform')
-    config[platform_name] = recipe.get(platform_name)
+    def _renameRepository(self, repo_dict, repo_name):
+        number = repo_dict.get(repo_name)
+        new_name = ''.join([repo_name, '_', str(number)])
+        while(new_name in repo_dict):
+            number += 1
+            new_name = ''.join([repo_name, '_', str(number)])
+        repo_dict[repo_name] = number + 1
+        return new_name
     
-    dir_path = os.path.join(filepath, datetime.now().strftime('%Y%m%d%H%M%S%f'))
-    make_dirs(dir_path)
-    logger.info('kickstart cache dir=%s' % dir_path)
+    def getMergedRepositories(self):
+        result = []
+        repositories = self.getRepositories()
+        repo_name = {} # 'name': count
+        repo_url = {} # 'url': exist
+        for target in repositories:
+            if target.get('type') == 'recipe':
+                if target.get('repos'):
+                    for repo in target.get('repos'):
+                        # if repo's url is duplicated, remove it.
+                        if repo.get('url') in repo_url:
+                            continue
+                        # if repo's name is duplicated, rename it (postfix '_count')
+                        if repo.get('name') in repo_name:
+                            repo['name'] = self._renameRepository(repo_name, repo['name'])
+                        else:
+                            repo_name[repo['name']] = 1
+                        repo_url[repo['url']] = 1
+                        result.append(repo)
+                else:
+                    # recipe does not have repository information
+                    pass
+            elif(target.get('type') == 'repository'):
+                # if repo's url is duplicated, remove it.
+                if target.get('url') in repo_url:
+                    continue
+                if target['name'] in repo_name:
+                    target['name'] = self._renameRepository(repo_name, target['name'])
+                else:
+                    repo_name[target['name']] = 1
+                repo_url[target['url']] = 1
+                result.append(target)
+        return result
     
-    yamlinfo = YamlInfo(dir_path,
-                        os.path.join(dir_path, 'configs.yaml'),
-                        os.path.join(dir_path, 'repos.yaml'))
+    def getMergedRecipe(self):
+        if self._recipe:
+            return self._recipe
+
+        mergedInfo = default_recipe.getSystemConfig()
+        # merge recipe info
+        for i in xrange(len(self.inputs), 0, -1):
+            if self.inputs[i-1].get('type') == 'recipe':
+                recipe = self.recipes[self.inputs[i-1].get('url')]
+                if recipe.get('Recipe'):
+                    for k, v in recipe.get('Recipe').iteritems():
+                        if not v:
+                            continue
+                        if k in RECIPE_EXTEND_FIELD:
+                            if k == 'Repos':
+                                continue
+                            for j in xrange(len(v), 0, -1):
+                                mergedInfo['Recipe'][k].append(v[j-1])
+                        else:
+                            mergedInfo['Recipe'][k] = v
+                for fieldName in RECIPE_EXTEND_FIELD:
+                    if recipe.get(fieldName):
+                        if fieldName == 'Repositories':
+                            continue
+                        for data in recipe.get(fieldName):
+                            mergedInfo[fieldName].append(data)
+        # reverse order
+        for extName in RECIPE_EXTEND_FIELD:
+            if mergedInfo['Recipe'].get(extName):
+                mergedInfo['Recipe'][extName].reverse()
+            if mergedInfo.get(extName):
+                mergedInfo[extName].reverse()
+
+        # set repositories
+        mergedInfo['Repositories'] = self.getMergedRepositories()
+        if mergedInfo.get('Repositories'):
+            for repo in mergedInfo['Repositories']:
+                mergedInfo['Recipe']['Repos'].append(repo['name'])
+        return mergedInfo
     
-    # configs.yaml
-    with open(yamlinfo.configs, 'w') as outfile:
-        yaml.safe_dump(config, outfile, default_flow_style=False)
+    def export2Recipe(self, packages, outdir, filename='recipe.yaml'):
+        logger = logging.getLogger(__name__)
+        recipe = self.getMergedRecipe()
+        make_dirs(outdir)
+        recipe_path = os.path.join(outdir, filename)
+        # set packages
+        if packages:
+            recipe['Recipe']['ExtraPackages'] = packages
+        # set repositories
+        if 'Repositories' in recipe:
+            repos = []
+            for repo in recipe.get('Repositories'):
+                repos.append(dict(Name= repo.get('name'),
+                                  Url= repo.get('url'),
+                                  Options = repo.get('options')))
+            recipe['Repositories'] = repos

-    # repo.yaml
-    if 'Repositories' in recipe:
-        repos = {}
-        repos['Repositories'] = recipe['Repositories']
-        with open(yamlinfo.repos, 'w') as outfile:
-            yaml.safe_dump(repos, outfile, default_flow_style=False)
+        try:
+            with open(recipe_path, 'w') as outfile:
+                yaml.safe_dump(recipe, outfile, line_break="\n", width=1000, default_flow_style=False)
+                #outfile.write(stream.replace('\n', '\n\n'))
+                if not os.path.exists(recipe_path):
+                    raise TICError('No recipe file was created')
+        except IOError as err:
+            logger.info(err)
+            raise TICError('Could not write the recipe file')
+        except yaml.YAMLError as err:
+            logger.info(err)
+            raise TICError(configmgr.message['recipe_convert_error'])
+        return recipe_path
     
-    # partition info
-    if 'Partitions' in recipe:
-        for partition in recipe.get('Partitions'):
-            partition_path = os.path.join(dir_path, 'partitions')
-            file_name = partition.get('Name')
-            temp = os.path.join(partition_path, file_name)
-            write(temp, partition['Contents'])
+    def export2Yaml(self, packages, filepath):
+        logger = logging.getLogger(__name__)
+        recipe = self.getMergedRecipe()
+        # configs.yaml
+        config = dict(Default=None, Configurations=[])
+        config['Default'] = recipe.get('Recipe')
+        if packages:
+            config['Default']['ExtraPackages'] = packages
+        # targets (only one target)
+        extraconfs = dict(Platform=DUMMY_PLATFORM, 
+                          ExtraPackages=[],
+                          Name= recipe['Recipe'].get('Name'),
+                          FileName= recipe['Recipe'].get('FileName'),
+                          Part= recipe['Recipe'].get('Part'))
+        config['Configurations'].append(extraconfs)
+        config[DUMMY_PLATFORM] = dict(ExtraPackages=[])
+        
+        dir_path = os.path.join(filepath, datetime.now().strftime('%Y%m%d%H%M%S%f'))
+        make_dirs(dir_path)
+        logger.info('kickstart cache dir=%s' % dir_path)
+        
+        yamlinfo = YamlInfo(dir_path,
+                            os.path.join(dir_path, 'configs.yaml'),
+                            os.path.join(dir_path, 'repos.yaml'))
+        
+        # configs.yaml
+        with open(yamlinfo.configs, 'w') as outfile:
+            yaml.safe_dump(config, outfile, default_flow_style=False)
     
-    # script.post
-    if 'PostScripts' in recipe:
-        for script in recipe.get('PostScripts'):
-            script_path = os.path.join(dir_path, 'scripts')
-            script_type = script.get('Type')
-            if script_type and script_type == 'nochroot':
-                file_name = '%s.nochroot' % script.get('Name')
-            else:
+        # repos.yaml
+        if 'Repositories' in recipe:
+            repos = dict(Repositories= [])
+            for repo in recipe.get('Repositories'):
+                repos['Repositories'].append(dict(Name= repo.get('name'),
+                                                  Url= repo.get('url'),
+                                                  Options = repo.get('options')))
+            with open(yamlinfo.repos, 'w') as outfile:
+                yaml.safe_dump(repos, outfile, default_flow_style=False)
+        
+        # partition info
+        if 'Partitions' in recipe:
+            for partition in recipe.get('Partitions'):
+                partition_path = os.path.join(dir_path, 'partitions')
+                file_name = partition.get('Name')
+                temp = os.path.join(partition_path, file_name)
+                write(temp, partition['Contents'])
+        
+        # script.post
+        if 'PostScripts' in recipe:
+            for script in recipe.get('PostScripts'):
+                script_path = os.path.join(dir_path, 'scripts')
                 file_name = '%s.post' % script.get('Name')
-            write(os.path.join(script_path, file_name), script['Contents'])
+                write(os.path.join(script_path, file_name), script['Contents'])
+        if 'NoChrootScripts' in recipe:
+            for script in recipe.get('NoChrootScripts'):
+                script_path = os.path.join(dir_path, 'scripts')
+                file_name = '%s.nochroot' % script.get('Name')
+                write(os.path.join(script_path, file_name), script['Contents'])
+        return yamlinfo
     
-    return yamlinfo
+def load_yaml(path):
+    logger = logging.getLogger(__name__)
+    try:
+        with file(path) as f:
+            return yaml.load(f)
+    except IOError as err:
+        logger.info(err)
+        raise TICError(configmgr.message['server_error'])
+    except yaml.YAMLError as err:
+        logger.info(err)
+        raise TICError(configmgr.message['recipe_parse_error'] % os.path.basename(path))
 
 YamlType = collections.namedtuple('YamlInfo', 'cachedir, configs, repos')
 def YamlInfo(cachedir, configs, repos):
-    return YamlType(cachedir, configs, repos)
\ No newline at end of file
+    return YamlType(cachedir, configs, repos)
+
+if __name__ == '__main__':
+    inputs = [{'url': DEFAULT_RECIPE_NAME, 'type': 'recipe'}, {'url': 'http://localhost/repo/recipe/recipe1.yaml', 'type': 'recipe'}]
+    parser = RecipeParser()
+    parser.addRecipes(inputs)
+    parser.parse()
+    print(parser.repositories)
index 69104e1..2ee2c8e 100644 (file)
@@ -120,8 +120,7 @@ class RepodataParser(object):
                 if requires_tag is not None:
                     dep_list = []
                     for rpm in requires_tag:
-                        require = {}
-                        require['name'] = rpm.attrib['name']
+                        require = dict(name= rpm.attrib['name'])
                         _set_version(require, rpm)
                         dep_list.append(require)
                     pkg_info['requires'] = dep_list;
@@ -129,8 +128,7 @@ class RepodataParser(object):
                 if provides_tag is not None:
                     dep_list = []
                     for rpm in provides_tag:
-                        provide = {}
-                        provide['name'] = rpm.attrib['name']
+                        provide = dict(name= rpm.attrib['name'])
                         _set_version(provide, rpm)
                         if provide.get('ver') and not provide.get('rel') and pkg_info['version']['rel']:
                             provide['rel'] = pkg_info['version']['rel'];
@@ -145,8 +143,7 @@ class RepodataParser(object):
                 if conflicts_tag is not None:
                     dep_list = []
                     for rpm in conflicts_tag:
-                        conflict = {}
-                        conflict['name'] = rpm.attrib['name']
+                        conflict = dict(name= rpm.attrib['name'])
                         _set_version(conflict, rpm)
                         dep_list.append(conflict)
                     pkg_info['conflicts'] = dep_list;
@@ -154,8 +151,7 @@ class RepodataParser(object):
                 if recommends_tag is not None:
                     dep_list = []
                     for rpm in recommends_tag:
-                        recommend = {}
-                        recommend['name'] = rpm.attrib['name']
+                        recommend = dict(name=rpm.attrib['name'])
                         _set_version(recommend, rpm)
                         dep_list.append(recommend)
                     pkg_info['recommends'] = dep_list;
@@ -163,7 +159,8 @@ class RepodataParser(object):
                 if suggests_tag is not None:
                     dep_list = []
                     for rpm in suggests_tag:
-                        dep_list.append(rpm.attrib['name'])
+                        suggests = dict(name= rpm.attrib['name'])
+                        dep_list.append(suggests)
                     pkg_info['suggests'] = dep_list;
                 file_tag = format_tag.find(tag_dic['file'])
                 if file_tag is not None:
index 4b45756..80ef325 100644 (file)
@@ -1,5 +1,4 @@
 import logging
-from tic.utils import misc
 from operator import itemgetter
 from tic.utils.rpmmisc import meetRequireVersion
 
@@ -29,9 +28,10 @@ def make_view_data(pkg_group):
         meta_nodes = meta_info.get('nodes')
         for child_meta in meta_nodes:
             set_meta_require(child_meta)
-            
+        
+        duplicate = set()
         pkg_info = pkg_dict[meta_info['metaname']]
-        for dep_tag in ['requires', 'recommends']:
+        for dep_tag in ['requires', 'recommends', 'suggests']:
             if pkg_info.get(dep_tag):
                 for req in pkg_info.get(dep_tag):
                     targets = []
@@ -48,10 +48,11 @@ def make_view_data(pkg_group):
                         
                     for pkg in targets:
                         # The meta-pkg of the other group are excluded.
-                        if not pkg.get('meta'):
+                        if not pkg.get('meta') and pkg['name'] not in duplicate:
                             refer_count[pkg['id']] += 1
+                            duplicate.add(pkg['name'])
                             meta_nodes.append(make_node(pkg))
-                
+        meta_nodes.sort()
     def make_node(pkg_info):
         #return dict(id=pkg_info['id'], text=pkg_info['name'], nodes=[])
         return dict(text=pkg_info['name'], nodes=[])
@@ -71,11 +72,11 @@ def make_view_data(pkg_group):
     files = pkg_group['files']
     refer_count = [0] * len(pkg_dict)
     
-    # sort by summary
+    # sort meta-pkg by pkg_name in ascending order
     meta_info = pkg_group.get('meta_info')
-    meta_info['root'] = sorted(meta_info['root'], key=itemgetter(1))
-    meta_info['sub1'] = sorted(meta_info['sub1'], key=itemgetter(2))
-    meta_info['sub2'] = sorted(meta_info['sub2'], key=itemgetter(3))
+    meta_info['root'] = sorted(meta_info['root'], key=itemgetter(0))
+    meta_info['sub1'] = sorted(meta_info['sub1'], key=itemgetter(0))
+    meta_info['sub2'] = sorted(meta_info['sub2'], key=itemgetter(0))
     
     # make tree of meta
     for root in meta_info['root']:
@@ -114,13 +115,13 @@ def make_view_data(pkg_group):
     
     #The remaining rpms are grouped into a MISC tree
     misc_info = {}
-    misc_info['text'] = 'MISC'
+    misc_info['text'] = 'ETC'
     misc_info['nodes'] = []
     for k, v in pkg_dict.iteritems():
         # Pkg is not referenced from Meta-pkg
         if not v.get('meta') and refer_count[v['id']] == 0:
             misc_info['nodes'].append(make_node(v))
 
-    logger.info('meta: %d, misc: %d', len(view_ref), len(misc_info['nodes']))
     view_data.append(misc_info)
+    logger.info('meta: %d, misc: %d', len(view_ref), len(misc_info['nodes']))
     return view_data
\ No newline at end of file
index 54881e4..6b8c58c 100644 (file)
@@ -8,7 +8,6 @@ import os
 import logging
 from tic import command
 from tic.utils import error
-from tic.config import configmgr
 
 app = Flask(__name__)
 
@@ -24,8 +23,9 @@ def analysis():
     try:
         logger = logging.getLogger(__name__)
         logger.info('%s - %s %s : data=%s' % (request.remote_addr, request.method, request.path, request.data))
-        repo_info = json.loads(request.data)
-        view_data = command.analyze(repo_info.get('repos'), repo_info.get('recipes'))
+        target_info = json.loads(request.data)
+        view_data = command.analyze(target_info.get('recipes'))
+        #view_data = command.analyze(None)
         resp = makeresponse(view_data, None)
     except error.TICError as err:
         logger.error(err)
@@ -33,6 +33,23 @@ def analysis():
     except ValueError as ve:
         logger.error(ve)
         resp = makeresponse(str(ve), ve)
+#     except Exception as ex:
+#         logger.error(ex)
+#         resp = makeresponse(str(ex), ex)
+    
+    return resp
+
+@app.route('/imports', methods=['POST'])
+def imports():
+    try:
+        logger = logging.getLogger(__name__)
+        logger.info('%s - %s %s : data=%s' % (request.remote_addr, request.method, request.path, request.data))
+        repo_info = json.loads(request.data)
+        view_data = command.imports(repo_info.get('recipes'))
+        resp = makeresponse(view_data, None)
+    except error.TICError as err:
+        logger.error(err)
+        resp = makeresponse(str(err), err)
     except Exception as ex:
         logger.error(ex)
         resp = makeresponse(str(ex), ex)
@@ -46,7 +63,7 @@ def exports():
         logger.info('%s - %s %s : data=%s' % (request.remote_addr, request.method, request.path, request.data))
         exportInfo = json.loads(request.data)
         export_type = request.args.get('format')
-        output = command.exports(export_type, exportInfo.get('recipe'), exportInfo.get('packages'), exportInfo.get('outdir'), exportInfo.get('filename'))
+        output = command.exports(export_type, exportInfo.get('recipes'), exportInfo.get('packages'), exportInfo.get('outdir'), exportInfo.get('filename'))
         resp = makeresponse(output, None)
     except error.TICError as err:
         logger.error(err)
@@ -54,9 +71,9 @@ def exports():
     except ValueError as ve:
         logger.error(ve)
         resp = makeresponse(str(ve), ve)
-    except Exception as ex:
-        logger.error(ex)
-        resp = makeresponse(str(ex), ex)
+    except Exception as ex:
+        logger.error(ex)
+        resp = makeresponse(str(ex), ex)
     return resp
 
 
@@ -67,12 +84,12 @@ def start(port_num=8082):
     with app.test_request_context():
         print(url_for('index'))
         print(url_for('analysis'))
+        print(url_for('imports'))
         print(url_for('exports'))
     if isinstance(port_num, (str, unicode)):
         port_num = int(port_num)
     app.run(host='0.0.0.0', threaded=True, port=port_num)
 
-
 def makeresponse(data, err):
     status = 200
     if err:
index d901386..370f1bb 100644 (file)
@@ -30,7 +30,7 @@ def setup(root):
     logger = logging.getLogger(root)
     logger.setLevel(logging.DEBUG)
     
-    formatter = logging.Formatter('%(asctime)s [%(levelname)s][%(filename)s(%(lineno)s)] %(message)s')
+    formatter = logging.Formatter('%(asctime)s [%(levelname)s][%(filename)s(%(lineno)s)-%(funcName)s()] %(message)s')
     formatter.datefmt = '%Y-%m-%d %H:%M:%S'
     
     mkdir_p(TIC_LOG_DIR)