2 # Copyright (c) 2000 - 2016 Samsung Electronics Co., Ltd. All rights reserved.
5 # @author Chulwoo Shin <cw1.shin@samsung.com>
7 # Licensed under the Apache License, Version 2.0 (the "License");
8 # you may not use this file except in compliance with the License.
9 # You may obtain a copy of the License at
11 # http://www.apache.org/licenses/LICENSE-2.0
13 # Unless required by applicable law or agreed to in writing, software
14 # distributed under the License is distributed on an "AS IS" BASIS,
15 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 # See the License for the specific language governing permissions and
17 # limitations under the License.
29 from datetime import datetime
30 from tic.utils.error import TICError
31 from tic.utils.file import write, make_dirs
32 from tic.config import configmgr
# Sentinel platform key used when exporting a single yaml target.
DUMMY_PLATFORM = 'DummyPlatform'
# Pseudo-URL that identifies the built-in default recipe in input descriptors.
DEFAULT_RECIPE_NAME = 'default_recipe'
# Optional on-disk YAML whose contents replace the hard-coded DEFAULT_RECIPE.
DEFAULT_RECIPE_PATH = configmgr.setting['default_recipe']
# Recipe fields that hold lists and are extended (appended) when recipes are
# merged, instead of being overwritten like scalar fields.
RECIPE_EXTEND_FIELD = {'Repos', 'Groups', 'Repositories', 'Partitions', 'ExtraPackages', 'RemovePackages', 'PostScripts', 'NoChrootScripts'}
class DefaultRecipe(object):
    """Holder for the built-in default Tizen image recipe.

    NOTE(review): this view of the file is missing some lines; a few keys of
    the 'Recipe' sub-dict below may be absent here — confirm against the
    full file before relying on the literal being complete.
    """
    # Built-in fallback recipe, used unless DEFAULT_RECIPE_PATH exists on disk.
    DEFAULT_RECIPE = {'NoChrootScripts': [{'Contents': 'if [ -n "$IMG_NAME" ]; then\n echo "BUILD_ID=$IMG_NAME" >> $INSTALL_ROOT/etc/tizen-release\n echo "BUILD_ID=$IMG_NAME" >> $INSTALL_ROOT/etc/os-release\nfi\n',
                                          'Name': 'buildname'}],
                      # Partition layout written out as a kickstart-style part file.
                      'Partitions': [{'Contents': 'part / --size=2000 --ondisk mmcblk0p --fstype=ext4 --label=rootfs --extoptions="-J size=16"\npart /opt/ --size=1000 --ondisk mmcblk0p --fstype=ext4 --label=system-data --extoptions="-m 0"\npart /boot/kernel/mod_tizen_tm1/lib/modules --size=12 --ondisk mmcblk0p --fstype=ext4 --label=modules\n',
                                      'Name': 'default-part'}],
                      # Post-install shell script embedded verbatim.
                      'PostScripts': [{'Contents': '#!/bin/sh\necho "#################### generic-base.post ####################"\n\ntest ! -e /opt/var && mkdir -p /opt/var\ntest -d /var && cp -arf /var/* /opt/var/\nrm -rf /var\nln -snf opt/var /var\n\ntest ! -e /opt/usr/home && mkdir -p /opt/usr/home\ntest -d /home && cp -arf /home/* /opt/usr/home/\nrm -rf /home\nln -snf opt/usr/home /home\n\nbuild_ts=$(date -u +%s)\nbuild_date=$(date -u --date @$build_ts +%Y%m%d_%H%M%S)\nbuild_time=$(date -u --date @$build_ts +%H:%M:%S)\n\nsed -ri \\\n\t-e \'s|@BUILD_ID[@]|@BUILD_ID@|g\' \\\n\t-e "s|@BUILD_DATE[@]|$build_date|g" \\\n\t-e "s|@BUILD_TIME[@]|$build_time|g" \\\n\t-e "s|@BUILD_TS[@]|$build_ts|g" \\\n\t/etc/tizen-build.conf\n\n# setup systemd default target for user session\ncat <<\'EOF\' >>/usr/lib/systemd/user/default.target\n[Unit]\nDescription=User session default target\nEOF\nmkdir -p /usr/lib/systemd/user/default.target.wants\n\n# sdx: fix smack labels on /var/log\nchsmack -a \'*\' /var/log\n\n# create appfw dirs inside homes\nfunction generic_base_user_exists() {\n user=$1\n getent passwd | grep -q ^${user}:\n}\n\nfunction generic_base_user_home() {\n user=$1\n getent passwd | grep ^${user}: | cut -f6 -d\':\'\n}\n\nfunction generic_base_fix_user_homedir() {\n user=$1\n generic_base_user_exists $user || return 1\n\nhomedir=$(generic_base_user_home $user)\n mkdir -p $homedir/apps_rw\n for appdir in desktop manifest dbspace; do\n mkdir -p $homedir/.applications/$appdir\n done\n find $homedir -type d -exec chsmack -a User {} \\;\n chown -R $user:users $homedir\n return 0\n}\n\n# fix TC-320 for SDK\n. /etc/tizen-build.conf\n[ "${TZ_BUILD_WITH_EMULATOR}" == "1" ] && generic_base_fix_user_homedir developer\n\n# Add info.ini for system-info CAPI (TC-2047)\n/etc/make_info_file.sh',
                                      'Name': 'generic-base'}],
                      # Scalar image settings; names here reference the
                      # Partitions/PostScripts/NoChrootScripts entries above.
                      'Recipe': {'Active': True,
                                 'Architecture': 'armv7l',
                                 'BootloaderAppend': 'rw vga=current splash rootwait rootfstype=ext4 plymouth.enable=0',
                                 'BootloaderOptions': '--ptable=gpt --menus="install:Wipe and Install:systemd.unit=system-installer.service:test"',
                                 'BootloaderTimeout': 3,
                                 'DefaultUser': 'guest',
                                 'DefaultUserPass': 'tizen',
                                 'FileName': 'default-armv7l',
                                 'Language': 'en_US.UTF-8',
                                 'Mic2Options': '-f raw --fstab=uuid --copy-kernel --compress-disk-image=bz2 --generate-bmap',
                                 'Name': 'default-recipe',
                                 'NoChrootScripts': ['buildname'],
                                 'Part': 'default-part',
                                 'PostScripts': ['generic-base'],
                                 'Repos': ['tizen_unified', 'tizen_base_armv7l'],
                                 'Timezone': 'Asia/Seoul',
                                 'UserGroups': 'audio,video'},
                      # Package repositories referenced by 'Repos' above.
                      'Repositories': [{'Name': 'tizen_unified',
                                        'Options': '--ssl_verify=no',
                                        'Url': 'http://download.tizen.org/snapshots/tizen/unified/latest/repos/standard/packages/'},
                                        #'Url': 'http://download.tizen.org/live/devel:/Tizen:/Unified/standard/'},
                                       {'Name': 'tizen_base_armv7l',
                                        'Options': '--ssl_verify=no',
                                        'Url': 'http://download.tizen.org/snapshots/tizen/base/latest/repos/arm/packages/'}]}
    def __new__(cls, *args, **kwargs):
        # Singleton construction: stash the sole instance on the class.
        # NOTE(review): the surrounding lines (presumably a
        # "if not hasattr(cls, '_instance'):" guard and the
        # "return cls._instance") are missing from this view — confirm
        # against the full file.
        cls._instance = super(DefaultRecipe, cls).__new__(cls, *args, **kwargs)

    # NOTE(review): the lines below reference `self`, so they most likely
    # belong to an __init__ whose "def" line is missing from this view,
    # together with the "try:" that the except clauses close; this span
    # does not parse as shown.
    logger = logging.getLogger(__name__)
    # If a default-recipe YAML exists on disk it replaces the hard-coded
    # DEFAULT_RECIPE class attribute.
    if os.path.exists(DEFAULT_RECIPE_PATH):
        with file(DEFAULT_RECIPE_PATH) as f:
            # yaml.load on a local, trusted config file; unsafe for
            # untrusted input.
            self.DEFAULT_RECIPE = yaml.load(f)
        logger.info('Read default recipe from %s' % DEFAULT_RECIPE_PATH)
        except IOError as err:
        except yaml.YAMLError as err:
97 def getDefaultRecipe(self):
98 return copy.deepcopy(self.DEFAULT_RECIPE)
    def getSystemConfig(self):
        # Produce a copy of the default recipe whose extendable list fields
        # under 'Recipe' are emptied, to serve as the merge base.
        # NOTE(review): the body of the 'Partitions' branch and this
        # method's return statement are missing from this view; as shown
        # the span does not parse.
        data = copy.deepcopy(self.DEFAULT_RECIPE)
        for field in RECIPE_EXTEND_FIELD:
            if field == 'Partitions':
            if data['Recipe'].get(field):
                # Drop the defaults so merged recipes start from empty lists.
                data['Recipe'][field] = []
109 def getDefaultParameter(self):
110 return [dict(url=DEFAULT_RECIPE_NAME, type='recipe')]
112 default_recipe = DefaultRecipe()
class RecipeParser(object):
    """Loads recipe/repository input descriptors and merges them.

    NOTE(review): several lines of this class are missing from this view;
    comments describe only the code that is visible.
    """
    def __init__(self, inputs):
        # input order defines priority (earlier entries win on merge)
        # NOTE(review): other attribute initialisations (e.g. the containers
        # for self.inputs and self.recipes used below) are not visible here.
        self._repositories = None  # lazy cache filled by getRepositories()
        self.addRecipes(inputs)
    # NOTE(review): the "def" line of this method is missing from this view
    # (along with several other lines), so the span below does not parse as
    # shown. The visible body loads every input descriptor: built-in or
    # remote recipes into self.recipes, and names raw repository entries.
    logger = logging.getLogger(__name__)
        self._repositories = None
        for data in self.inputs:
            data_type = data.get('type')
            # type: recipe or repository
            if data_type == 'recipe':
                if data.get('url') == DEFAULT_RECIPE_NAME:
                    # Built-in recipe: no network fetch needed.
                    self.recipes[data.get('url')] = default_recipe.getDefaultRecipe()
                # Remote recipe: download the URL and parse the YAML payload.
                with contextlib.closing(urllib2.urlopen(data.get('url'))) as op:
                    self.recipes[data.get('url')] = yaml.load(op.read())
            elif data_type == 'repository':
                # Raw repository inputs get a generated sequential name.
                data['name'] = 'repository_%s' % repo_count
            except urllib2.HTTPError as err:
                msg = configmgr.message['recipe_not_found'] % data.get('url')
            except urllib2.URLError as err:
                raise TICError(configmgr.message['server_error'])
            except yaml.YAMLError as err:
                raise TICError(configmgr.message['recipe_parse_error'] % data.get('url'))
    def addRecipes(self, inputs):
        # Append input descriptor(s) to self.inputs; appears to accept either
        # a list of descriptors or a single one.
        # NOTE(review): the loop over the list (binding `data`) and the else
        # branch are missing from this view; the span does not parse as shown.
        if isinstance(inputs, list):
            self.inputs.append(data)
            self.inputs.append(inputs)
167 def getRepositories(self):
168 if not self._repositories:
169 self._repositories = self._getAllRepositories()
170 return self._repositories
    def _getAllRepositories(self):
        # Flatten every input into repository dicts; recipe inputs contribute
        # their 'Repos' names resolved against their 'Repositories' section.
        # NOTE(review): several lines are missing from this view (accumulator
        # initialisation for repos/recipe_repos/name_count, some branch
        # bodies, and the return), so the span does not parse as shown.
        for data in self.inputs:
            if data.get('type') == 'recipe':
                recipe_info = self.recipes[data['url']]
                if recipe_info.get('Recipe'):
                    if recipe_info['Recipe'].get('Name'):
                        recipe_name = recipe_info['Recipe'].get('Name')
                    if recipe_info['Recipe'].get('Repos'):
                        for repo_name in recipe_info['Recipe'].get('Repos'):
                            if recipe_info.get('Repositories'):
                                for repo_info in recipe_info.get('Repositories'):
                                    if repo_info.get('Name') == repo_name:
                                        # Normalise to lower-case keys used internally.
                                        recipe_repos.append(dict(name=repo_name,
                                                                 url=repo_info.get('Url'),
                                                                 options=repo_info.get('Options')))
                            # repository does not exist
                            raise TICError(configmgr.message['recipe_repo_not_exist'] % repo_name)
                # Unnamed recipes get a generated sequential name.
                recipe_name = 'recipe_%s' % name_count
                repos.append(dict(name=recipe_name,
208 def _renameRepository(self, repo_dict, repo_name):
209 number = repo_dict.get(repo_name)
210 new_name = ''.join([repo_name, '_', str(number)])
211 while(new_name in repo_dict):
213 new_name = ''.join([repo_name, '_', str(number)])
214 repo_dict[repo_name] = number + 1
    def getMergedRepositories(self):
        # Merge repositories from all inputs: drop entries whose URL was
        # already seen, rename entries whose name collides.
        # NOTE(review): several lines are missing from this view (the
        # `result` accumulator setup, the `continue` statements after the
        # duplicate-URL checks, and the final return), so the span does not
        # parse as shown.
        repositories = self.getRepositories()
        repo_name = {} # 'name': count
        repo_url = {} # 'url': exist
        for target in repositories:
            if target.get('type') == 'recipe':
                if target.get('repos'):
                    for repo in target.get('repos'):
                        # if repo's url is duplicated, remove it.
                        if repo.get('url') in repo_url:
                        # if repo's name is duplicated, rename it (postfix '_count')
                        if repo.get('name') in repo_name:
                            repo['name'] = self._renameRepository(repo_name, repo['name'])
                        repo_name[repo['name']] = 1
                        repo_url[repo['url']] = 1
            # recipe does not have repository information
            elif(target.get('type') == 'repository'):
                # if repo's url is duplicated, remove it.
                if target.get('url') in repo_url:
                if target['name'] in repo_name:
                    target['name'] = self._renameRepository(repo_name, target['name'])
                repo_name[target['name']] = 1
                repo_url[target['url']] = 1
                result.append(target)
    def getMergedRecipe(self):
        # Merge all recipe inputs over the system default config. Inputs are
        # walked in reverse, so earlier inputs appear to take priority:
        # fields in RECIPE_EXTEND_FIELD are appended (then reversed back into
        # original order below), scalar fields are overwritten.
        # NOTE(review): several lines are missing from this view (branch
        # bodies, the else that separates extend vs. overwrite, and the
        # return), so the span does not parse as shown.
        mergedInfo = default_recipe.getSystemConfig()
        for i in xrange(len(self.inputs), 0, -1):
            if self.inputs[i-1].get('type') == 'recipe':
                recipe = self.recipes[self.inputs[i-1].get('url')]
                if recipe.get('Recipe'):
                    for k, v in recipe.get('Recipe').iteritems():
                        if k in RECIPE_EXTEND_FIELD:
                            # Append list entries in reverse source order.
                            for j in xrange(len(v), 0, -1):
                                mergedInfo['Recipe'][k].append(v[j-1])
                            mergedInfo['Recipe'][k] = v
                # Top-level list sections (Partitions, PostScripts, ...).
                for fieldName in RECIPE_EXTEND_FIELD:
                    if recipe.get(fieldName):
                        if fieldName == 'Repositories':
                            for data in recipe.get(fieldName):
                                mergedInfo[fieldName].append(data)
        # Restore original ordering of all extended list fields.
        for extName in RECIPE_EXTEND_FIELD:
            if mergedInfo['Recipe'].get(extName):
                mergedInfo['Recipe'][extName].reverse()
            if mergedInfo.get(extName):
                mergedInfo[extName].reverse()
        # Repositories are merged separately with dedup/rename applied, and
        # their names are recorded in the Recipe's Repos list.
        mergedInfo['Repositories'] = self.getMergedRepositories()
        if mergedInfo.get('Repositories'):
            for repo in mergedInfo['Repositories']:
                mergedInfo['Recipe']['Repos'].append(repo['name'])
    def export2Recipe(self, packages, outdir, filename='recipe.yaml'):
        """Write the merged recipe, with *packages* as ExtraPackages, to
        outdir/filename as YAML.

        NOTE(review): lines are missing from this view (the "try:" the
        except clauses close, the `repos` accumulator setup, log calls),
        so the span does not parse as shown. 'reciep_path' is an existing
        typo for 'recipe_path', left untouched here.
        """
        logger = logging.getLogger(__name__)
        recipe = self.getMergedRecipe()
        reciep_path = os.path.join(outdir, filename)
        recipe['Recipe']['ExtraPackages'] = packages
        # Convert internal lower-case repo dicts back to Name/Url/Options keys.
        if 'Repositories' in recipe:
            for repo in recipe.get('Repositories'):
                repos.append(dict(Name= repo.get('name'),
                                  Url= repo.get('url'),
                                  Options = repo.get('options')))
            recipe['Repositories'] = repos
        with open(reciep_path, 'w') as outfile:
            yaml.safe_dump(recipe, outfile, line_break="\n", width=1000, default_flow_style=False)
            #outfile.write(stream.replace('\n', '\n\n'))
        if not os.path.exists(reciep_path):
            raise TICError('No recipe file was created')
        except IOError as err:
            raise TICError('Could not read the recipe files')
        except yaml.YAMLError as err:
            raise TICError(configmgr.message['recipe_convert_error'])
    def export2Yaml(self, packages, filepath):
        """Export the merged recipe as kickstart-cache files (configs.yaml,
        repos.yaml, partition and script files) under a timestamped
        directory below *filepath*.

        NOTE(review): several lines are missing from this view (directory
        creation, try/except plumbing, and the presumable return of
        yamlinfo), so the span may not parse as shown.
        """
        logger = logging.getLogger(__name__)
        recipe = self.getMergedRecipe()
        # configs.yaml skeleton: 'Default' holds the merged recipe values.
        config = dict(Default=None, Configurations=[])
        config['Default'] = recipe.get('Recipe')
        config['Default']['ExtraPackages'] = packages
        # targets (only one target)
        extraconfs = dict(Platform=DUMMY_PLATFORM,
                          Name= recipe['Recipe'].get('Name'),
                          FileName= recipe['Recipe'].get('FileName'),
                          Part= recipe['Recipe'].get('Part'))
        config['Configurations'].append(extraconfs)
        config[DUMMY_PLATFORM] = dict(ExtraPackages=[])
        # Unique cache dir per export (timestamp including microseconds).
        dir_path = os.path.join(filepath, datetime.now().strftime('%Y%m%d%H%M%S%f'))
        logger.info('kickstart cache dir=%s' % dir_path)
        yamlinfo = YamlInfo(dir_path,
                            os.path.join(dir_path, 'configs.yaml'),
                            os.path.join(dir_path, 'repos.yaml'))
        with open(yamlinfo.configs, 'w') as outfile:
            yaml.safe_dump(config, outfile, default_flow_style=False)
        # repos.yaml: repositories converted back to Name/Url/Options keys.
        if 'Repositories' in recipe:
            repos = dict(Repositories= [])
            for repo in recipe.get('Repositories'):
                repos['Repositories'].append(dict(Name= repo.get('name'),
                                                  Url= repo.get('url'),
                                                  Options = repo.get('options')))
            with open(yamlinfo.repos, 'w') as outfile:
                yaml.safe_dump(repos, outfile, default_flow_style=False)
        # One file per partition layout under <dir>/partitions.
        if 'Partitions' in recipe:
            for partition in recipe.get('Partitions'):
                partition_path = os.path.join(dir_path, 'partitions')
                file_name = partition.get('Name')
                temp = os.path.join(partition_path, file_name)
                write(temp, partition['Contents'])
        # Post-install scripts -> <dir>/scripts/<name>.post
        if 'PostScripts' in recipe:
            for script in recipe.get('PostScripts'):
                script_path = os.path.join(dir_path, 'scripts')
                file_name = '%s.post' % script.get('Name')
                write(os.path.join(script_path, file_name), script['Contents'])
        # Non-chroot scripts -> <dir>/scripts/<name>.nochroot
        if 'NoChrootScripts' in recipe:
            for script in recipe.get('NoChrootScripts'):
                script_path = os.path.join(dir_path, 'scripts')
                file_name = '%s.nochroot' % script.get('Name')
                write(os.path.join(script_path, file_name), script['Contents'])
# NOTE(review): the "def" line of this helper (and its yaml.load body) is
# missing from this view, so the span does not parse as shown. It evidently
# reads and parses a YAML file at *path*, mapping I/O errors and YAML parse
# errors to TICError with messages from configmgr.
logger = logging.getLogger(__name__)
    with file(path) as f:
except IOError as err:
    raise TICError(configmgr.message['server_error'])
except yaml.YAMLError as err:
    raise TICError(configmgr.message['recipe_parse_error'] % os.path.basename(path))
# Named-tuple bundle of the kickstart-cache paths produced by export2Yaml.
YamlType = collections.namedtuple('YamlInfo', 'cachedir, configs, repos')

def YamlInfo(cachedir, configs, repos):
    """Build the (cachedir, configs, repos) path bundle for the yaml cache."""
    return YamlType(cachedir=cachedir, configs=configs, repos=repos)
if __name__ == '__main__':
    # Ad-hoc smoke test: merge the built-in default recipe with a local one.
    inputs = [{'url': DEFAULT_RECIPE_NAME, 'type': 'recipe'}, {'url': 'http://localhost/repo/recipe/recipe1.yaml', 'type': 'recipe'}]
    # Fix: RecipeParser.__init__ requires the inputs list; the previous
    # no-argument construction raised TypeError and then re-added the same
    # inputs via addRecipes().
    parser = RecipeParser(inputs)
    # Fix: no 'repositories' attribute is defined on RecipeParser; use the
    # public accessor instead.
    print(parser.getRepositories())