--- /dev/null
+#!/usr/bin/env python
+"""
+ job_sync_snapdiff.py
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Generate repo and image diffs, sync them to tizen.org.
+
+"""
+
+import os
+import re
+import sys
+from distutils.sysconfig import get_python_lib
+
+sys.path.insert(0, get_python_lib())
+
+import bs4
+from jinja2 import Environment, FileSystemLoader
+import requests
+import snapdiff
+
+from common.buildtrigger import trigger_info
+from common.utils import sync
+
+
def search_last_build(build_url, released=False):
    """Search the latest build in build_url.

    :param build_url: build url containing the build directories.
    :param released: whether build_url is a released (finished) tree.
    :return: the last build url, or None if none can be determined.
    """
    res = requests.get(build_url)
    if res.status_code != 200:
        return None
    # Pass the parser explicitly: bs4 otherwise warns and picks whichever
    # parser happens to be installed, which can change the parse result.
    soup = bs4.BeautifulSoup(res.text, 'html.parser')
    # Build dirs look like tizen_YYYYMMDD.N/ — the dot must be escaped,
    # otherwise '.' matches any character.
    pattern = re.compile(r'^tizen_[0-9]{8}\.[0-9]+/$')
    builds = [i.text for i in soup.find_all(href=pattern)]

    if released:
        # Released trees only list finished builds, so the newest is usable.
        # Guard against an empty listing (previously raised IndexError).
        last_build = builds[-1] if builds else None
    elif len(builds) > 1:
        # The newest snapshot may still be in progress; take the one before it.
        last_build = builds[-2]
    else:
        last_build = None

    return os.path.join(build_url, last_build) if last_build else None
+
def get_last_build(url_pub_base, repo_path):
    """Return the second latest build next to repo_path.

    :param url_pub_base: the public base url.
    :param repo_path: the relative repo path.
    :return: the second latest build url, or None.
    """
    # Strip the last path component of repo_path, then look for sibling builds.
    parent_path = repo_path[:repo_path.rfind('/')]
    return search_last_build(os.path.join(url_pub_base, parent_path))
+
def get_released_build(release_base, repo_path):
    """Return the latest released builds.

    :param release_base: release base url.
    :param repo_path: the relative repo path.
    :return: tuple of the latest daily and weekly build urls.
    """
    # Drop the first and last components of repo_path; the middle part is
    # shared between the snapshot tree and the releases tree.
    middle = '/'.join(repo_path.split('/')[1:-1])
    found = []
    for period in ('daily', 'weekly'):
        period_url = os.path.join(release_base, period, middle)
        found.append(search_last_build(period_url, True))
    return tuple(found)
+
+def _get_buildid(url):
+ """Get build id from url"""
+ result = re.search(r'\w*_\d{8}\.\d*', url)
+ return result.group(0) if result else url
+
+def _get_name(url):
+ """Get image name from url"""
+ name = url[:-1].split('/')[-1]
+ return name or url
+
def generate_diff(old_url, new_url, name, style='repo'):
    """Generate a repo or image diff report and return its file name.

    :param old_url: old repo/image url.
    :param new_url: new repo/image url.
    :param name: diff prefix name.
    :param style: diff style, 'repo' or 'image'.
    :return: the html report file name written under
        $WORKSPACE/outdir/builddata/reports.
    """
    # NOTE(review): assumes WORKSPACE is always set in the Jenkins job
    # environment; os.path.join would raise TypeError otherwise.
    sync_out = os.path.join(os.getenv('WORKSPACE'), 'outdir')
    reports = os.path.join(sync_out, 'builddata', 'reports')
    diff_name = '-'.join([name, _get_buildid(old_url), _get_buildid(new_url)])
    snapdiff.diff_to_dist(old_url, new_url, reports, style, diff_name)
    return '%s.html' % diff_name
+
def sync_world(template_name, repo_path, **kwargs):
    """Render the summary index and sync repo and image diffs.

    :param template_name: template used to generate the summary index.
    :param repo_path: relative repo path appended to the sync destination.
    :param kwargs: context used to render the template.
    """
    # Templates live next to this script.
    template_path = os.path.join(os.path.dirname(__file__), 'templates')
    env = Environment(loader=FileSystemLoader(template_path))
    template = env.get_template(template_name)
    content = template.render(kwargs)
    sync_out = os.path.join(os.getenv('WORKSPACE'), 'outdir')
    with open(os.path.join(sync_out, 'builddata', 'reports', 'index.html'), 'w') as fp:
        fp.write(content)
    # Push the whole outdir (reports + index) to the publish destination.
    sync(sync_out, os.path.join(os.getenv('IMG_SYNC_DEST_BASE'), repo_path))
+
def main():
    """The main body.

    Reads TRIGGER_INFO from the environment, generates package diffs for
    the current repo against the previous snapshot and the latest
    daily/weekly releases, does the same for each published image, then
    renders an index page and syncs everything to the publish server.
    """
    info = trigger_info(os.getenv('TRIGGER_INFO'))
    # Trigger payload shape (from the visible accesses below):
    # {repo_name: [{'url_pub_base', 'repo_path', 'buildid', 'images_path', 'name'}, ...]}
    # TODO confirm against the triggering job.
    repo_name = list(info)[0]
    content = info[repo_name]

    if content is None:
        return

    # Base url/path info is identical across items; read it from the first.
    url_pub_base = content[0]['url_pub_base']
    repo_path = content[0]['repo_path']

    diff_root = os.path.join(url_pub_base, repo_path)
    current_repo_url = os.path.join(diff_root, 'repos', repo_name, 'packages')

    # Template context: build id plus a list of (label, buildid, report) tuples.
    context = {'id': content[0]['buildid']}
    context['repo'] = []

    # generate current and last repo diff
    last_repo_base = get_last_build(url_pub_base, repo_path)

    if last_repo_base:
        last_repo_url = os.path.join(last_repo_base, 'repos', repo_name, 'packages')
        last_current = generate_diff(last_repo_url, current_repo_url, 'last_current')
        last_id = _get_buildid(last_repo_url)
        context['repo'].append(('last_build', last_id, last_current))

    # releases' url
    releases_url = os.path.join(url_pub_base, 'releases')

    # generate current and releases repo diff
    daily_repo_base, weekly_repo_base = get_released_build(releases_url, repo_path)

    if daily_repo_base:
        daily_repo_url = os.path.join(daily_repo_base, 'repos', repo_name, 'packages')
        daily_current = generate_diff(daily_repo_url, current_repo_url, 'daily_current')
        daily_id = _get_buildid(daily_repo_url)
        context['repo'].append(('daily_build', daily_id, daily_current))

    if weekly_repo_base:
        weekly_repo_url = os.path.join(weekly_repo_base, 'repos', repo_name, 'packages')
        weekly_current = generate_diff(weekly_repo_url, current_repo_url, 'weekly_current')
        weekly_id = _get_buildid(weekly_repo_url)
        context['repo'].append(('weekly_build', weekly_id, weekly_current))

    # generate image diff
    context['images'] = {}

    for item in content:
        # Old-image urls are None when the corresponding base build is missing;
        # that also guards the *_id names below, which are only bound when the
        # matching *_repo_base was truthy above.
        current_image_url = os.path.join(diff_root, item['images_path'])
        last_image_url = os.path.join(last_repo_base, item['images_path']) if last_repo_base else None
        daily_image_url = os.path.join(daily_repo_base, item['images_path']) if daily_repo_base else None
        weekly_image_url = os.path.join(weekly_repo_base, item['images_path']) if weekly_repo_base else None

        context['images'][item['name']] = []

        # collect each image information.
        # Each GET is an existence probe: diff only when both sides respond 200.
        if requests.get(current_image_url).status_code == 200:
            if last_image_url and requests.get(last_image_url).status_code == 200:
                name = generate_diff(last_image_url, current_image_url, item['name']+'_last_current', style='image')
                context['images'][item['name']].append(('last_build', last_id, name))
            if daily_image_url and requests.get(daily_image_url).status_code == 200:
                name = generate_diff(daily_image_url, current_image_url, item['name']+'_daily_current', style='image')
                context['images'][item['name']].append(('daily_build', daily_id, name))
            if weekly_image_url and requests.get(weekly_image_url).status_code == 200:
                name = generate_diff(weekly_image_url, current_image_url, item['name']+'_weekly_current', style='image')
                context['images'][item['name']].append(('weekly_build', weekly_id, name))

        # if image is empty, pop it.
        if not context['images'][item['name']]:
            context['images'].pop(item['name'])

    # sync all
    sync_world('index.html', repo_path, **context)
+
if __name__ == '__main__':
    # Propagate main()'s return value (None -> exit status 0) to the shell.
    sys.exit(main())