import hashlib
from tic.dependency import get_installed_packages
-from tic.parser.recipe_parser import get_default_recipe, convert_recipe_to_yaml
+from tic.parser.recipe_parser import default_recipe, RecipeParser
from tic.parser.repo_parser import RepodataParser
from tic.parser.view_parser import make_view_data
from tic.utils.error import TICError
DEFAULT_ANALYSISDIR=os.path.join(DEFAULT_CACHEDIR, 'analysis')
DEFAULT_KICKSTARTDIR=os.path.join(DEFAULT_CACHEDIR, 'kickstart')
-def analyze(repo_list, recipe_list=None):
+def analyze(recipe_list):
logger = logging.getLogger(__name__)
- if not repo_list and not recipe_list:
- raise TICError('No repositories defined')
- repos = []
- recipe = None
- #TODO Repository check
- # using default recipe (Temporary Code)
- if recipe_list and recipe_list[0] == 'default':
- recipe = get_default_recipe()
- for repo_url in recipe.get('Repositories'):
- repos.append({'name': repo_url.get('Name'),
- 'url': repo_url.get('Url')})
- else:
- number=1
- for repo_url in repo_list:
- repos.append({'name': 'repository_%d' % number,
- 'url': repo_url})
- number = number + 1
+ if not recipe_list:
+ logger.info('Use default recipe because there is no import data')
+ recipe_list = default_recipe.getDefaultParameter()
+
+ recipe_parser = RecipeParser(recipe_list)
+ recipe_parser.parse()
+ recipe_info = recipe_parser.getMergedRecipe()
+
start_time = misc.get_timestamp()
#Download repodata from repositories (Remote/Local)
- repoinfo = get_repodata_from_repos(repos, DEFAULT_CACHEDIR)
+ repoinfo = get_repodata_from_repos(recipe_info.get('Repositories'), DEFAULT_CACHEDIR)
logger.info('time to get repodata from repo: %d ms', misc.get_timestamp() - start_time)
checksum_list=[]
# Make a data for TIC (Tizen image creation)
view_data = make_view_data(pkg_group)
# analyze install-dependency
- inst_packages = get_installed_packages(recipe, repoinfo, pkg_group)
+ inst_packages = get_installed_packages(recipe_info, repoinfo, pkg_group)
logger.info('installed package: %d', len(inst_packages))
logger.info('time to analyze dependency: %d ms', misc.get_timestamp() - start_time)
'files': pkg_group.get('files'),
'groups': pkg_group.get('groups'),
'conflicts': pkg_group.get('conflicts')},
- 'repos': repos,
+ 'recipes': recipe_parser.getRepositories(),
'defaultpackages': inst_packages}
return result
-def exports(export_type, recipe, packages, outdir, filename=None):
+def imports(recipe_list):
+ logger = logging.getLogger(__name__)
+ if not recipe_list:
+ logger.info('Use default recipe because there is no import data')
+ recipe_list = default_recipe.getDefaultParameter()
+
+ recipe_parser = RecipeParser(recipe_list)
+ recipe_parser.parse()
+ result = {'recipes': recipe_parser.getRepositories()}
+ #result = {'imports': recipe_parser.getMergedRepositories()}
+ return result
+
+def exports(export_type, recipes, packages, outdir, filename=None):
logger = logging.getLogger(__name__)
#TODO validation should be checked before request
if not export_type:
export_type='ks'
logger.info('set default export format(.ks)')
- if not recipe:
- raise TICError('No recipe defined')
+ if not recipes:
+        raise TICError(configmgr.message['recipes_not_define'] % export_type)
if not packages or type(packages) is not list:
- raise TICError('No packages defined')
-
- #TODO recipe parsing
- # Temporary code for 1st prototype release
- if recipe.get('name') == 'default':
- recipe = get_default_recipe()
- config = recipe.get('Configurations')[0]
- for key in ['Default', config['Platform']]:
- recipe[key]['Groups']=[]
- recipe[key]['ExtraPackages']=[]
- config['Groups']=[]
- config['ExtraPackages'] = packages
- else:
- raise TICError('No recipes defined')
-
- # create the yaml
- yaml_info = convert_recipe_to_yaml(recipe, DEFAULT_KICKSTARTDIR)
-
- # create kickstart(.ks) using kickstarter tool
- options = KSoption(yaml_info.configs, yaml_info.repos, yaml_info.cachedir)
- kswriter(options)
-
- # check whether the ks exists
- baseline=recipe['Default'].get('Baseline')
- ksname= ''.join([config.get('FileName'), '.ks'])
- kspath=os.path.join(yaml_info.cachedir, baseline, ksname)
- if not os.path.exists(kspath):
- raise TICError('No ks file was created from kickstarter')
+ raise TICError(configmgr.message['no_package_to_install'])
- # copy the ks to output directory
- output=copyfile(kspath, outdir, filename)
- logger.info('copy the ks file from %s to dst:%s', kspath, output)
- result = {'kspath':output, 'arch':config.get('Architecture')}
+ recipe_parser = RecipeParser(recipes)
+ recipe_parser.parse()
+ result = None
+ if export_type == 'recipe':
+ recipe_path = recipe_parser.export2Recipe(packages, outdir, filename)
+ logger.info('export the recipe to %s' % recipe_path)
+ result = {'path': recipe_path}
+ elif export_type == 'ks':
+ # 1. create yaml files
+ yaml_info = recipe_parser.export2Yaml(packages, DEFAULT_KICKSTARTDIR)
+ # 2. create kickstart(.ks) using kickstarter tool
+ options = KSoption(yaml_info.configs, yaml_info.repos, yaml_info.cachedir)
+ kswriter(options)
+ # check whether the ks exists
+ recipe_info = recipe_parser.getMergedRecipe()
+ baseline=recipe_info['Recipe'].get('Baseline')
+ ksname= ''.join([recipe_info['Recipe'].get('FileName'), '.ks'])
+ kspath=os.path.join(yaml_info.cachedir, baseline, ksname)
+ logger.info('the kickstarter created the ks file (%s)' % kspath)
+ if not os.path.exists(kspath):
+ raise TICError('No ks file was created from kickstarter')
+ # copy the ks to output directory
+ output=copyfile(kspath, outdir, filename)
+ logger.info('export the ks to %s' % output)
+ result = {'path':output, 'arch':recipe_info['Recipe'].get('Architecture')}
return result
def createimage(recipes, ksfile, outdir):
DEFAULT_CONF = "/etc/tic-core/config.conf"
class ConfigMgr(object):
- DEFAULT_MESSAGE = {'message': {
- 'repo_not_found': "The repository url cannot be found (%s)",
- 'xml_parse_error': "There was a problem parsing the %s, please check the file (%s)",
- 'yaml_parse_error': "There was a problem parsing the %s, please check the file (%s)",
- 'recipe_parse_error': "There was a problem parsing the recipe, please check the recipe file (%s)",
- 'package_not_exist': "The default package(%s) does not exist.",
- 'dependency_not_exist': "The %s needed by %s does not exist. should be checked for repository",
- 'server_error': "there was a problem servicing your request. please try again later" }
+ DEFAULT_MESSAGE = {"message": {
+ "repo_not_found": "The repository URL cannot be found (%s)",
+ "recipe_not_found": "The recipe URL cannot be found (%s)",
+ "xml_parse_error": "There was a problem parsing the %s, please check the file (%s)",
+ "yaml_parse_error": "There was a problem parsing the %s, please check the file (%s)",
+ "recipe_parse_error": "There was a problem parsing the recipe, please check the recipe file (%s)",
+ "recipe_convert_error": "There was a problem converting this recipe, please check the recipes",
+ "package_not_exist": "The default package(%s) does not exist.",
+ "dependency_not_exist": "The %s needed by %s does not exist. should be checked for repository",
+ "server_error": "there was a problem servicing your request. please try again later",
+        "recipe_repo_not_exist": "%s repository does not exist in the recipe",
+ "default_recipe_use": "Use default recipe because there is no import data",
+ "no_package_to_install": "No packages to install. Please select a package to install",
+ "recipes_not_define": "Please define recipe for %s file creation"}
}
- DEFAULT_TIC = {'setting': {
- 'tempdir': '/var/tmp/tic-core',
- 'cachedir': '/var/tmp/tic-core/cache',
- 'logdir': '/var/tmp/tic-core/log'},
- 'server': {
- 'port': 8082},
- 'regularexp': {
- 'meta_prefix': "building-blocks",
- 'meta_prefix_root': "building-blocks-root-",
- 'meta_prefix_sub1': "building-blocks-sub1-",
- 'meta_pattern': "-(?P<meta>root|sub1|sub2)-(?P<pkgname>.+)",
- 'meta_sub1_pattern': "(?P<root>.+)-(?P<sub1>.+)",
- 'meta_sub2_pattern': "(?P<root>.+)-(?P<sub1>.+)-(?P<sub2>.+)",
- 'profile_pattern': "(?P<pkgname>.+)-profile_(?P<profile>[^-]+)-?(?P<extra>.+)?"}
+ DEFAULT_TIC = {"setting": {
+ "tempdir": "/var/tmp/tic-core",
+ "cachedir": "/var/tmp/tic-core/cache",
+ "logdir": "/var/tmp/tic-core/log",
+ "default_recipe": "/etc/tic-core/recipe.yaml"},
+ "server": {
+ "port": 8082},
+ "regularexp": {
+ "meta_prefix": "building-blocks",
+ "meta_prefix_root": "building-blocks-root-",
+ "meta_prefix_sub1": "building-blocks-sub1-",
+ "meta_pattern": "-(?P<meta>root|sub1|sub2)-(?P<pkgname>.+)",
+ "meta_sub1_pattern": "(?P<root>.+)-(?P<sub1>.+)",
+ "meta_sub2_pattern": "(?P<root>.+)-(?P<sub1>.+)-(?P<sub2>.+)",
+ "profile_pattern": "(?P<pkgname>.+)-profile_(?P<profile>[^-]+)-?(?P<extra>.+)?"}
}
_instance = None
cls._instance = super(ConfigMgr, cls).__new__(cls, *args, **kwargs)
return cls._instance
- def __init__(self, ):
+ def __init__(self):
self._reset()
for conf_path in [DEFAULT_CONF, DEFAULT_MSG_CONF]:
self._setConfig(conf_path)
# Contributors:
# - S-Core Co., Ltd
+import copy
+import os
+import yaml
+import urllib2
+import contextlib
import collections
-from datetime import datetime
import logging
-import os
-from tic.utils import error
+from datetime import datetime
+from tic.utils.error import TICError
from tic.utils.file import write, make_dirs
-import yaml
from tic.config import configmgr
+DUMMY_PLATFORM = 'DummyPlatform'
+DEFAULT_RECIPE_NAME = 'default_recipe'
+DEFAULT_RECIPE_PATH = configmgr.setting['default_recipe']
+RECIPE_EXTEND_FIELD = {'Repos', 'Groups', 'Repositories', 'Partitions', 'ExtraPackages', 'RemovePackages', 'PostScripts', 'NoChrootScripts'}
-def get_default_recipe():
- recipe = dict(
- Default=dict(
- Baseline= 'tizen-3.0',
- Active= True,
- Mic2Options= '-f raw --fstab=uuid --copy-kernel --compress-disk-image=bz2 --generate-bmap',
- Part='headless',
- Language= 'en_US.UTF-8',
- Keyboard= 'us',
- Timezone= 'Asia/Seoul',
- RootPass= 'tizen',
- DefaultUser= 'guest',
- DefaultUserPass= 'tizen',
- BootLoader= True,
- BootloaderAppend= "rw vga=current splash rootwait rootfstype=ext4 plymouth.enable=0",
- BootloaderTimeout= 3,
- BootloaderOptions= '--ptable=gpt --menus="install:Wipe and Install:systemd.unit=system-installer.service:test"',
- StartX= False,
- Desktop= 'None',
- SaveRepos= False,
- UserGroups= "audio,video"
- ),
- Wayland=dict(
- Part='headless',
- UserGroups='audio,video',
- Groups=[],
- PostScripts=[],
- Repos= [],
- NoChrootScripts=[]
- ),
- Configurations=[
- dict(
- Name='tizen-headless',
- Architecture='armv7l',
- Schedule= "*",
- Active= True,
- Platform= 'Wayland',
- Part= 'headless',
- Mic2Options= '-f loop --pack-to=@NAME@.tar.gz',
- FileName= 'tizen-headless-tm1',
- Repos=['tizen-unified', 'tizen-base'],
- Groups=[],
- ExtraPackages= [],
- RemovePackages=[]
- )
- ],
- Repositories=[
- dict(Name='tizen-unified',
- Url='http://download.tizen.org/live/devel:/Tizen:/Unified/standard/',
- #Url='file://home/shinchulwoo/Repo/Unified',
- Options='--ssl_verify=no'),
- dict(Name='tizen-base',
- Url='http://download.tizen.org/snapshots/tizen/base/latest/repos/arm/packages/',
- #Url='file://home/shinchulwoo/Repo/Base',
- Options='--ssl_verify=no')
- ],
- Partitions=[
- dict(Name='headless',
- Contents='part / --size=2000 --ondisk mmcblk0p --fstype=ext4 --label=rootfs --extoptions=\"-J size=16\" \n\
-part /opt/ --size=1000 --ondisk mmcblk0p --fstype=ext4 --label=system-data --extoptions="-m 0" \n\
-part /boot/kernel/mod_tizen_tm1/lib/modules --size=12 --ondisk mmcblk0p --fstype=ext4 --label=modules \n')
- ]
- )
- return recipe
+class DefaultRecipe(object):
+ DEFAULT_RECIPE = {'NoChrootScripts': [{'Contents': 'if [ -n "$IMG_NAME" ]; then\n echo "BUILD_ID=$IMG_NAME" >> $INSTALL_ROOT/etc/tizen-release\n echo "BUILD_ID=$IMG_NAME" >> $INSTALL_ROOT/etc/os-release\nfi\n',
+ 'Name': 'buildname'}],
+ 'Partitions': [{'Contents': 'part / --size=2000 --ondisk mmcblk0p --fstype=ext4 --label=rootfs --extoptions="-J size=16"\npart /opt/ --size=1000 --ondisk mmcblk0p --fstype=ext4 --label=system-data --extoptions="-m 0"\npart /boot/kernel/mod_tizen_tm1/lib/modules --size=12 --ondisk mmcblk0p --fstype=ext4 --label=modules\n',
+ 'Name': 'default-part'}],
+ 'PostScripts': [{'Contents': '#!/bin/sh\necho "#################### generic-base.post ####################"\n\ntest ! -e /opt/var && mkdir -p /opt/var\ntest -d /var && cp -arf /var/* /opt/var/\nrm -rf /var\nln -snf opt/var /var\n\ntest ! -e /opt/usr/home && mkdir -p /opt/usr/home\ntest -d /home && cp -arf /home/* /opt/usr/home/\nrm -rf /home\nln -snf opt/usr/home /home\n\nbuild_ts=$(date -u +%s)\nbuild_date=$(date -u --date @$build_ts +%Y%m%d_%H%M%S)\nbuild_time=$(date -u --date @$build_ts +%H:%M:%S)\n\nsed -ri \\\n\t-e \'s|@BUILD_ID[@]|@BUILD_ID@|g\' \\\n\t-e "s|@BUILD_DATE[@]|$build_date|g" \\\n\t-e "s|@BUILD_TIME[@]|$build_time|g" \\\n\t-e "s|@BUILD_TS[@]|$build_ts|g" \\\n\t/etc/tizen-build.conf\n\n# setup systemd default target for user session\ncat <<\'EOF\' >>/usr/lib/systemd/user/default.target\n[Unit]\nDescription=User session default target\nEOF\nmkdir -p /usr/lib/systemd/user/default.target.wants\n\n# sdx: fix smack labels on /var/log\nchsmack -a \'*\' /var/log\n\n# create appfw dirs inside homes\nfunction generic_base_user_exists() {\n user=$1\n getent passwd | grep -q ^${user}:\n}\n\nfunction generic_base_user_home() {\n user=$1\n getent passwd | grep ^${user}: | cut -f6 -d\':\'\n}\n\nfunction generic_base_fix_user_homedir() {\n user=$1\n generic_base_user_exists $user || return 1\n\nhomedir=$(generic_base_user_home $user)\n mkdir -p $homedir/apps_rw\n for appdir in desktop manifest dbspace; do\n mkdir -p $homedir/.applications/$appdir\n done\n find $homedir -type d -exec chsmack -a User {} \\;\n chown -R $user:users $homedir\n return 0\n}\n\n# fix TC-320 for SDK\n. /etc/tizen-build.conf\n[ "${TZ_BUILD_WITH_EMULATOR}" == "1" ] && generic_base_fix_user_homedir developer\n\n# Add info.ini for system-info CAPI (TC-2047)\n/etc/make_info_file.sh',
+ 'Name': 'generic-base'}],
+ 'Recipe': {'Active': True,
+ 'Architecture': 'armv7l',
+ 'Baseline': 'tizen',
+ 'BootLoader': True,
+ 'BootloaderAppend': 'rw vga=current splash rootwait rootfstype=ext4 plymouth.enable=0',
+ 'BootloaderOptions': '--ptable=gpt --menus="install:Wipe and Install:systemd.unit=system-installer.service:test"',
+ 'BootloaderTimeout': 3,
+ 'DefaultUser': 'guest',
+ 'DefaultUserPass': 'tizen',
+ 'Desktop': 'None',
+ 'ExtraPackages': [],
+ 'FileName': 'default-armv7l',
+ 'Groups': [],
+ 'Keyboard': 'us',
+ 'Language': 'en_US.UTF-8',
+ 'Mic2Options': '-f raw --fstab=uuid --copy-kernel --compress-disk-image=bz2 --generate-bmap',
+ 'Name': 'default-recipe',
+ 'NoChrootScripts': ['buildname'],
+ 'Part': 'default-part',
+ 'PostScripts': ['generic-base'],
+ 'RemovePackages': [],
+ 'Repos': ['tizen_unified', 'tizen_base_armv7l'],
+ 'RootPass': 'tizen',
+ 'SaveRepos': False,
+ 'Schedule': '*',
+ 'StartX': False,
+ 'Timezone': 'Asia/Seoul',
+ 'UserGroups': 'audio,video'},
+ 'Repositories': [{'Name': 'tizen_unified',
+ 'Options': '--ssl_verify=no',
+ 'Url': 'http://download.tizen.org/snapshots/tizen/unified/latest/repos/standard/packages/'},
+ #'Url': 'http://download.tizen.org/live/devel:/Tizen:/Unified/standard/'},
+ {'Name': 'tizen_base_armv7l',
+ 'Options': '--ssl_verify=no',
+ 'Url': 'http://download.tizen.org/snapshots/tizen/base/latest/repos/arm/packages/'}]}
+ _instance = None
+ def __new__(cls, *args, **kwargs):
+ if not cls._instance:
+ cls._instance = super(DefaultRecipe, cls).__new__(cls, *args, **kwargs)
+ return cls._instance
+ def __init__(self):
+ logger = logging.getLogger(__name__)
+ if os.path.exists(DEFAULT_RECIPE_PATH):
+ try:
+ with file(DEFAULT_RECIPE_PATH) as f:
+ self.DEFAULT_RECIPE = yaml.load(f)
+ logger.info('Read default recipe from %s' % DEFAULT_RECIPE_PATH)
+ except IOError as err:
+ logger.info(err)
+ except yaml.YAMLError as err:
+ logger.info(err)
+ def getDefaultRecipe(self):
+ return copy.deepcopy(self.DEFAULT_RECIPE)
+ def getSystemConfig(self):
+ data = copy.deepcopy(self.DEFAULT_RECIPE)
+ for field in RECIPE_EXTEND_FIELD:
+ if field == 'Partitions':
+ continue
+ if data['Recipe'].get(field):
+ data['Recipe'][field] = []
+ if data.get(field):
+ data[field] = []
+ return data
+ def getDefaultParameter(self):
+ return [dict(url=DEFAULT_RECIPE_NAME, type='recipe')]
-def load_yaml(path):
- logger = logging.getLogger(__name__)
- try:
- with file(path) as f:
- return yaml.load(f)
- except IOError as err:
- logger(err)
- raise error.TICError(configmgr.message['server_error'])
- except yaml.YAMLError as err:
- logger(err)
- raise error.TICError(configmgr.message['recipe_parse_error'] % os.path.basename(path))
+default_recipe = DefaultRecipe()
+
+class RecipeParser(object):
+ def __init__(self, inputs):
+ # in order to priority definition
+ self.inputs = []
+ self.recipes = {}
+ self._repositories = None
+ self._recipe = None
+ # add recipe input
+ self.addRecipes(inputs)
+
+ def parse(self):
+ logger = logging.getLogger(__name__)
+ if not self.inputs:
+ return
+ self._repositories = None
+ self._recipe = None
+ repo_count = 1
+ try:
+ for data in self.inputs:
+ data_type = data.get('type')
+ # type: recipe or repository
+ if data_type == 'recipe':
+ # default recipe
+ if data.get('url') == DEFAULT_RECIPE_NAME:
+ self.recipes[data.get('url')] = default_recipe.getDefaultRecipe()
+ else:
+ with contextlib.closing(urllib2.urlopen(data.get('url'))) as op:
+ self.recipes[data.get('url')] = yaml.load(op.read())
+ elif data_type == 'repository':
+ data['name'] = 'repository_%s' % repo_count
+ repo_count += 1
+ except urllib2.HTTPError as err:
+ if err.code == 404:
+ msg = configmgr.message['recipe_not_found'] % data.get('url')
+ else:
+ msg = str(err)
+ logger.error(err)
+ raise TICError(msg)
+ except urllib2.URLError as err:
+ logger.error(err)
+ raise TICError(configmgr.message['server_error'])
+ except yaml.YAMLError as err:
+ logger.error(err)
+ raise TICError(configmgr.message['recipe_parse_error'] % data.get('url'))
+ def addRecipes(self, inputs):
+ if inputs:
+ if isinstance(inputs, list):
+ for data in inputs:
+ self.inputs.append(data)
+ else:
+ self.inputs.append(inputs)
-def convert_recipe_to_yaml(recipe, filepath):
- logger = logging.getLogger(__name__)
+ def getRepositories(self):
+ if not self._repositories:
+ self._repositories = self._getAllRepositories()
+ return self._repositories
+
+ def _getAllRepositories(self):
+ repos = []
+ name_count = 1
+ for data in self.inputs:
+ if data.get('type') == 'recipe':
+ recipe_repos = []
+ recipe_info = self.recipes[data['url']]
+ recipe_name = None
+ if recipe_info.get('Recipe'):
+ if recipe_info['Recipe'].get('Name'):
+ recipe_name = recipe_info['Recipe'].get('Name')
+ if recipe_info['Recipe'].get('Repos'):
+ for repo_name in recipe_info['Recipe'].get('Repos'):
+ isExist = False
+ if recipe_info.get('Repositories'):
+ for repo_info in recipe_info.get('Repositories'):
+ if repo_info.get('Name') == repo_name:
+ recipe_repos.append(dict(name=repo_name,
+ url=repo_info.get('Url'),
+ options=repo_info.get('Options')))
+ isExist = True
+ break
+ # repository does not exist
+ if not isExist:
+ raise TICError(configmgr.message['recipe_repo_not_exist'] % repo_name)
+ if not recipe_name:
+ recipe_name = 'recipe_%s' % name_count
+ name_count += 1
+ repos.append(dict(name=recipe_name,
+ url=data['url'],
+ repos=recipe_repos,
+ type='recipe'))
+ else:
+ repos.append(data)
+ return repos
- # config.yaml
- config = dict(Default=None, Configurations=[])
- config['Default'] = recipe.get('Default')
- # targets (only one target)
- config['Configurations'].append(recipe.get('Configurations')[0])
- platform_name = config['Configurations'][0].get('Platform')
- config[platform_name] = recipe.get(platform_name)
+ def _renameRepository(self, repo_dict, repo_name):
+ number = repo_dict.get(repo_name)
+ new_name = ''.join([repo_name, '_', str(number)])
+ while(new_name in repo_dict):
+ number += 1
+ new_name = ''.join([repo_name, '_', str(number)])
+ repo_dict[repo_name] = number + 1
+ return new_name
- dir_path = os.path.join(filepath, datetime.now().strftime('%Y%m%d%H%M%S%f'))
- make_dirs(dir_path)
- logger.info('kickstart cache dir=%s' % dir_path)
+ def getMergedRepositories(self):
+ result = []
+ repositories = self.getRepositories()
+ repo_name = {} # 'name': count
+ repo_url = {} # 'url': exist
+ for target in repositories:
+ if target.get('type') == 'recipe':
+ if target.get('repos'):
+ for repo in target.get('repos'):
+ # if repo's url is duplicated, remove it.
+ if repo.get('url') in repo_url:
+ continue
+ # if repo's name is duplicated, rename it (postfix '_count')
+ if repo.get('name') in repo_name:
+ repo['name'] = self._renameRepository(repo_name, repo['name'])
+ else:
+ repo_name[repo['name']] = 1
+ repo_url[repo['url']] = 1
+ result.append(repo)
+ else:
+ # recipe does not have repository information
+ pass
+ elif(target.get('type') == 'repository'):
+ # if repo's url is duplicated, remove it.
+ if target.get('url') in repo_url:
+ continue
+ if target['name'] in repo_name:
+ target['name'] = self._renameRepository(repo_name, target['name'])
+ else:
+ repo_name[target['name']] = 1
+ repo_url[target['url']] = 1
+ result.append(target)
+ return result
- yamlinfo = YamlInfo(dir_path,
- os.path.join(dir_path, 'configs.yaml'),
- os.path.join(dir_path, 'repos.yaml'))
+ def getMergedRecipe(self):
+ if self._recipe:
+ return self._recipe
+
+ mergedInfo = default_recipe.getSystemConfig()
+ # merge recipe info
+ for i in xrange(len(self.inputs), 0, -1):
+ if self.inputs[i-1].get('type') == 'recipe':
+ recipe = self.recipes[self.inputs[i-1].get('url')]
+ if recipe.get('Recipe'):
+ for k, v in recipe.get('Recipe').iteritems():
+ if not v:
+ continue
+ if k in RECIPE_EXTEND_FIELD:
+ if k == 'Repos':
+ continue
+ for j in xrange(len(v), 0, -1):
+ mergedInfo['Recipe'][k].append(v[j-1])
+ else:
+ mergedInfo['Recipe'][k] = v
+ for fieldName in RECIPE_EXTEND_FIELD:
+ if recipe.get(fieldName):
+ if fieldName == 'Repositories':
+ continue
+ for data in recipe.get(fieldName):
+ mergedInfo[fieldName].append(data)
+ # reverse order
+ for extName in RECIPE_EXTEND_FIELD:
+ if mergedInfo['Recipe'].get(extName):
+ mergedInfo['Recipe'][extName].reverse()
+ if mergedInfo.get(extName):
+ mergedInfo[extName].reverse()
+
+ # set repositories
+ mergedInfo['Repositories'] = self.getMergedRepositories()
+ if mergedInfo.get('Repositories'):
+ for repo in mergedInfo['Repositories']:
+ mergedInfo['Recipe']['Repos'].append(repo['name'])
+ return mergedInfo
- # configs.yaml
- with open(yamlinfo.configs, 'w') as outfile:
- yaml.safe_dump(config, outfile, default_flow_style=False)
+    def export2Recipe(self, packages, outdir, filename='recipe.yaml'):
+        logger = logging.getLogger(__name__)
+        recipe = self.getMergedRecipe()
+        make_dirs(outdir)
+        recipe_path = os.path.join(outdir, filename)
+        # set packages
+        if packages:
+            recipe['Recipe']['ExtraPackages'] = packages
+        # set repositories
+        if 'Repositories' in recipe:
+            repos = []
+            for repo in recipe.get('Repositories'):
+                repos.append(dict(Name= repo.get('name'),
+                                  Url= repo.get('url'),
+                                  Options = repo.get('options')))
+            recipe['Repositories'] = repos
-    # repo.yaml
-    if 'Repositories' in recipe:
-        repos = {}
-        repos['Repositories'] = recipe['Repositories']
-        with open(yamlinfo.repos, 'w') as outfile:
-            yaml.safe_dump(repos, outfile, default_flow_style=False)
+        try:
+            with open(recipe_path, 'w') as outfile:
+                yaml.safe_dump(recipe, outfile, line_break="\n", width=1000, default_flow_style=False)
+                #outfile.write(stream.replace('\n', '\n\n'))
+            if not os.path.exists(recipe_path):
+                raise TICError('No recipe file was created')
+        except IOError as err:
+            logger.info(err)
+            raise TICError('Could not write the recipe file')
+        except yaml.YAMLError as err:
+            logger.info(err)
+            raise TICError(configmgr.message['recipe_convert_error'])
+        return recipe_path
- # partition info
- if 'Partitions' in recipe:
- for partition in recipe.get('Partitions'):
- partition_path = os.path.join(dir_path, 'partitions')
- file_name = partition.get('Name')
- temp = os.path.join(partition_path, file_name)
- write(temp, partition['Contents'])
+ def export2Yaml(self, packages, filepath):
+ logger = logging.getLogger(__name__)
+ recipe = self.getMergedRecipe()
+ # config.yaml
+ config = dict(Default=None, Configurations=[])
+ config['Default'] = recipe.get('Recipe')
+ if packages:
+ config['Default']['ExtraPackages'] = packages
+ # targets (only one target)
+ extraconfs = dict(Platform=DUMMY_PLATFORM,
+ ExtraPackages=[],
+ Name= recipe['Recipe'].get('Name'),
+ FileName= recipe['Recipe'].get('FileName'),
+ Part= recipe['Recipe'].get('Part'))
+ config['Configurations'].append(extraconfs)
+ config[DUMMY_PLATFORM] = dict(ExtraPackages=[])
+
+ dir_path = os.path.join(filepath, datetime.now().strftime('%Y%m%d%H%M%S%f'))
+ make_dirs(dir_path)
+ logger.info('kickstart cache dir=%s' % dir_path)
+
+ yamlinfo = YamlInfo(dir_path,
+ os.path.join(dir_path, 'configs.yaml'),
+ os.path.join(dir_path, 'repos.yaml'))
+
+ # configs.yaml
+ with open(yamlinfo.configs, 'w') as outfile:
+ yaml.safe_dump(config, outfile, default_flow_style=False)
- # script.post
- if 'PostScripts' in recipe:
- for script in recipe.get('PostScripts'):
- script_path = os.path.join(dir_path, 'scripts')
- script_type = script.get('Type')
- if script_type and script_type == 'nochroot':
- file_name = '%s.nochroot' % script.get('Name')
- else:
+ # repo.yaml
+ if 'Repositories' in recipe:
+ repos = dict(Repositories= [])
+ for repo in recipe.get('Repositories'):
+ repos['Repositories'].append(dict(Name= repo.get('name'),
+ Url= repo.get('url'),
+ Options = repo.get('options')))
+ with open(yamlinfo.repos, 'w') as outfile:
+ yaml.safe_dump(repos, outfile, default_flow_style=False)
+
+ # partition info
+ if 'Partitions' in recipe:
+ for partition in recipe.get('Partitions'):
+ partition_path = os.path.join(dir_path, 'partitions')
+ file_name = partition.get('Name')
+ temp = os.path.join(partition_path, file_name)
+ write(temp, partition['Contents'])
+
+ # script.post
+ if 'PostScripts' in recipe:
+ for script in recipe.get('PostScripts'):
+ script_path = os.path.join(dir_path, 'scripts')
file_name = '%s.post' % script.get('Name')
- write(os.path.join(script_path, file_name), script['Contents'])
+ write(os.path.join(script_path, file_name), script['Contents'])
+ if 'NoChrootScripts' in recipe:
+ for script in recipe.get('NoChrootScripts'):
+ script_path = os.path.join(dir_path, 'scripts')
+ file_name = '%s.nochroot' % script.get('Name')
+ write(os.path.join(script_path, file_name), script['Contents'])
+ return yamlinfo
- return yamlinfo
+def load_yaml(path):
+ logger = logging.getLogger(__name__)
+ try:
+ with file(path) as f:
+ return yaml.load(f)
+ except IOError as err:
+ logger.info(err)
+ raise TICError(configmgr.message['server_error'])
+ except yaml.YAMLError as err:
+ logger.info(err)
+ raise TICError(configmgr.message['recipe_parse_error'] % os.path.basename(path))
YamlType = collections.namedtuple('YamlInfo', 'cachedir, configs, repos')
def YamlInfo(cachedir, configs, repos):
- return YamlType(cachedir, configs, repos)
\ No newline at end of file
+ return YamlType(cachedir, configs, repos)
+
+if __name__ == '__main__':
+    inputs = [{'url': DEFAULT_RECIPE_NAME, 'type': 'recipe'}, {'url': 'http://localhost/repo/recipe/recipe1.yaml', 'type': 'recipe'}]
+    parser = RecipeParser([])
+    parser.addRecipes(inputs)
+    parser.parse()
+    print(parser.getRepositories())