Creates the update in one go.
Delete old getpacs tools and updateinfo.py.
Signed-off-by: Markus Lehtonen <markus.lehtonen@linux.intel.com>
+++ /dev/null
-#!/usr/bin/python
-
-from sets import Set
-import csv
-import urllib2
-from optparse import OptionParser
-import os
-import re, base64
-import ConfigParser
-import shutil
-
-CACHE_DIR = "cache"
-
-def read_config(config_file):
- config_file = os.path.expanduser(config_file)
- parser = ConfigParser.SafeConfigParser()
- parser.read(config_file)
- return parser
-
-def http_get(url, credentials=(None, None)):
- print "Downloading %s" %url
- request = urllib2.Request(url)
- if credentials[0] and credentials[1]:
- base64string = base64.encodestring('%s:%s' % (credentials[0], credentials[1])).replace('\n', '')
- request.add_header("Authorization", "Basic %s" % base64string)
- html_page = urllib2.urlopen(request)
- return html_page
-
-def download(url, fname, credentials, outdir, cachedir):
- cached_file = os.path.join(cachedir, fname)
- if os.path.exists(cached_file):
- print "File cache hit: %s" % fname
- else:
- ret = http_get(os.path.join(url, fname), credentials)
- cache = open(cached_file, "w")
- cache.write(ret.read())
- cache.close()
- if outdir:
- dest_file = os.path.join(outdir, fname)
- if not os.path.exists(dest_file):
- shutil.copy2(cached_file, dest_file)
-
-def get_package_list(image_name, base_url, build_id, credentials, outdir, cachedir):
- cache_file = "%s/%s-%s.packages" %(cachedir, image_name, build_id )
- package_file = None
- if not os.path.exists(cache_file):
- image_packages = "%s/%s/images/%s/%s-%s.packages" %(base_url, build_id, image_name, image_name, build_id )
- #print image_packages
- package_file = http_get(image_packages, credentials)
- cache = open(cache_file, "w")
- cache.write(package_file.read())
- cache.close()
- with open(cache_file, "rb") as package_file:
- packages = {}
- pkgreader = csv.reader(package_file, delimiter=' ', quotechar='|')
- for row in pkgreader:
- pkg = row[0].split(".")
- if len(row)>2:
- packages[pkg[0]] = {'scm': row[2], 'version': row[1], 'arch': pkg[1]}
- else:
- packages[pkg[0]] = {'scm': None, 'version': row[1], 'arch': pkg[1]}
- shutil.copy2(cache_file, os.path.join(outdir, "packages"))
-
- return packages
-
-parser = OptionParser()
-parser.add_option("-o", "--old", dest="old", metavar="OLD", help="Old snapshot")
-parser.add_option("-n", "--new", dest="new", metavar="NEW", help="New snapshot")
-parser.add_option("-t", "--type", dest="type", metavar="TYPE", help="Release type")
-parser.add_option("-i", "--image", dest="image", metavar="IMAGE", help="Image Name")
-parser.add_option("-u", "--username", dest="username", metavar="USERNAME", help="Username for https")
-parser.add_option("-p", "--password", dest="password", metavar="PASSWD", help="Password for https")
-parser.add_option("--outdir", dest="outdir", help="Output directory")
-
-(options, args) = parser.parse_args()
-
-config = read_config('~/.swuprc')
-
-DAILY="/pc/releases/daily/trunk"
-WEEKLY="/pc/releases/weekly/trunk"
-SNAPSHOTS="/snapshots/trunk/pc/"
-BASE="https://download.tz.otcshare.org/"
-
-if options.type == "daily":
- release_url = "%s/%s" %(BASE, DAILY)
-if options.type == "weekly":
- release_url = "%s/%s" %(BASE, WEEKLY)
-else:
- release_url = "%s/%s" %(BASE, SNAPSHOTS)
-
-credentials = [None, None]
-if options.username:
- credentials[0] = options.username
-elif config.has_option('DEFAULT', 'username'):
- credentials[0] = config.get('DEFAULT', 'username')
-if options.password:
- credentials[1] = options.password
-elif config.has_option('DEFAULT', 'password'):
- credentials[1] = config.get('DEFAULT', 'password')
-# Initialize cache dir
-if config.has_option('DEFAULT', 'cache-dir'):
- CACHE_DIR = config.get('DEFAULT', 'cache-dir')
-CACHE_DIR = os.path.abspath(os.path.expanduser(CACHE_DIR))
-packages_files_dir = os.path.join(CACHE_DIR, 'packages-files')
-if not os.path.exists(packages_files_dir):
- os.makedirs(packages_files_dir)
-
-outdir = options.outdir if options.outdir else "update-%s-to-%s" % (options.old, options.new)
-if not os.path.exists(outdir):
- os.makedirs(outdir)
-else:
- print "Cleaning up %s" % outdir
- for filename in ['rpms', 'new']:
- filepath = os.path.join(outdir, filename)
- if os.path.exists(filepath):
- shutil.rmtree(os.path.join(filepath))
-
-p1 = get_package_list(options.image, release_url, options.old, credentials, outdir, packages_files_dir)
-p2 = get_package_list(options.image, release_url, options.new, credentials, outdir, packages_files_dir)
-
-pkgs1 = {'%s|%s' % (pkg, attr['arch']) for pkg, attr in p1.iteritems()}
-pkgs2 = {'%s|%s' % (pkg, attr['arch']) for pkg, attr in p2.iteritems()}
-newpkgs = [pkg.split('|')[0] for pkg in pkgs2.difference(pkgs1)]
-
-pkgs1 = {'%s|%s' % (pkg, attr['version']) for pkg, attr in p1.iteritems()}
-pkgs2 = {'%s|%s' % (pkg, attr['version']) for pkg, attr in p2.iteritems()}
-changedpkgs = [pkg.split('|')[0] for pkg in pkgs2.difference(pkgs1) if pkg.split('|')[0] in p1]
-
-cached_pkgs_dir = os.path.join(CACHE_DIR, 'rpms')
-if not os.path.exists(cached_pkgs_dir):
- os.makedirs(cached_pkgs_dir)
-
-new_pkgs_dir = os.path.join(outdir, 'new')
-if not os.path.exists(new_pkgs_dir):
- os.makedirs(new_pkgs_dir)
-changed_pkgs_dir = os.path.join(outdir, 'rpms')
-if not os.path.exists(changed_pkgs_dir):
- os.makedirs(changed_pkgs_dir)
-
-old_repourl = "%s/%s/repos/pc/x86_64/packages/" % (release_url, options.old)
-new_repourl = "%s/%s/repos/pc/x86_64/packages/" % (release_url, options.new)
-
-with open(os.path.join(outdir, "repourl"), "w") as repourlfile:
- repourlfile.write("%s\n" % new_repourl)
-
-for p in newpkgs:
- rpm = "%s-%s.%s.rpm" % (p, p2[p]['version'], p2[p]['arch'])
- arch = p2[p]['arch']
- download("%s/%s" % (new_repourl, arch), rpm, credentials, new_pkgs_dir, cached_pkgs_dir)
-
-for p in changedpkgs:
- rpm = "%s-%s.%s.rpm" % (p, p1[p]['version'], p1[p]['arch'])
- arch = p1[p]['arch']
- download("%s/%s" % (old_repourl, arch), rpm, credentials, None, cached_pkgs_dir)
- rpm = "%s-%s.%s.rpm" % (p, p2[p]['version'], p2[p]['arch'])
- download("%s/%s" % (new_repourl, arch), rpm, credentials, changed_pkgs_dir, cached_pkgs_dir)
-
+++ /dev/null
-#!/bin/sh
-
-BUILD_OLD=$1
-BUILD_NEW=$2
-if [ -z "$BUILD_OLD" -o -z "$BUILD_NEW" ]; then
- echo "You need to provide old and new snapshot IDs"
- exit 1
-fi
-USER="nashif"
-PASS=""
-WGET="/usr/bin/wget -q --timestamping "
-DAILY="/pc/releases/daily/trunk"
-WEEKLY="/pc/releases/weekly/trunk"
-SNAPSHOTS="/snapshots/trunk/pc/"
-
-RELEASE_TYPE=$SNAPSHOTS
-
-BASE_DIR=$PWD
-wget https://$USER:$PASS@download.tz.otcshare.org/${RELEASE_TYPE}/${BUILD_OLD}/images/gnome/gnome-${BUILD_OLD}.packages -O old
-wget https://$USER:$PASS@download.tz.otcshare.org/${RELEASE_TYPE}/${BUILD_NEW}/images/gnome/gnome-${BUILD_NEW}.packages -O new
-awk -F' ' ' { print $1 } ' $BASE_DIR/old | sort > $BASE_DIR/p0
-awk -F' ' ' { print $1 } ' $BASE_DIR/new | sort > $BASE_DIR/p1
-
-comm -3 $BASE_DIR/p0 $BASE_DIR/p1 > $BASE_DIR/new_packages
-
-OLD_IFS=$IFS
-IFS=$'\n'
-mkdir -p old_packages
-mkdir -p update/rpms
-mkdir -p update/new
-pushd old_packages
-for i in `diff -u $BASE_DIR/old $BASE_DIR/new | grep "^-" | grep -v "^---" | grep -v "^+++" | sed -e 's/^-//'`; do
- pac=$(echo $i | sed -e 's/\([^\.]*\)\.\([^\s]*\)\s\([^$]*\)/\2\/\1-\3.\2.rpm/')
- echo "Fetching $pac"
- /usr/bin/wget -q --timestamping https://$USER:$PASS@download.tz.otcshare.org/${RELEASE_TYPE}/${BUILD_OLD}/repos/pc/x86_64/packages/$pac
-done
-popd
-for i in `diff -u $BASE_DIR/old $BASE_DIR/new | grep "^+" | grep -v "^---" | grep -v "^+++" | sed -e 's/^+//'`; do
- pac=$(echo $i | sed -e 's/\([^\.]*\)\.\([^\s]*\)\s\([^$]*\)/\2\/\1-\3.\2.rpm/')
- pac2=$(echo $i | sed -e 's/\([^\.]*\)\.\([^\s]*\)\s\([^$]*\)/\1.\2/')
- echo "is $pac2 a new package?"
- grep $pac2 $BASE_DIR/new_packages
- if [ $? = 1 ]; then
- pushd update/rpms
- else
- pushd update/new
- fi
- echo "Fetching $pac"
- /usr/bin/wget -q --timestamping https://$USER:$PASS@download.tz.otcshare.org/${RELEASE_TYPE}/${BUILD_NEW}/repos/pc/x86_64/packages/$pac
- popd
-
-done
-IFS=$OLD_IFS
-
-
-#rm $BASE_DIR/new $BASE_DIR/old $BASE_DIR/p0 $BASE_DIR/{p1,new_packages}
--- /dev/null
+#!/usr/bin/python
+
+import os
+import shutil
+import sys
+from optparse import OptionParser
+from ConfigParser import SafeConfigParser
+
+from updateutils import get_package_list, download, parse_patch, create_updateinfo, create_update_file, update_metadata
+
+
+def read_config(config_file):
+ config_file = os.path.expanduser(config_file)
+ parser = SafeConfigParser()
+ parser.read(config_file)
+ return parser
+
+parser = OptionParser()
+parser.add_option('-U', '--updatesfile', metavar='UPDATES',
+ help='master updates.xml file')
+parser.add_option('-O', '--original', metavar='ORIGINAL',
+ help='Original and Old package directory')
+
+parser.add_option('-q', '--quiet', action='store_true',
+ help='do not show downloading progress')
+parser.add_option('-d', '--destdir', default='.', metavar='DIR',
+ help='Directory where to store the updates.')
+parser.add_option('-p', '--patch', metavar='TEXT',
+ help='Patch information')
+parser.add_option('-P', '--patchdir', metavar='DIR',
+ help='directory with patch files')
+parser.add_option("-o", "--old", dest="old", metavar="OLD", help="Old snapshot")
+parser.add_option("-n", "--new", dest="new", metavar="NEW", help="New snapshot")
+parser.add_option("-t", "--type", dest="type", metavar="TYPE", help="Release type")
+parser.add_option("-i", "--image", dest="image", metavar="IMAGE", help="Image Name")
+parser.add_option("--username", dest="username", metavar="USERNAME", help="Username for https")
+parser.add_option("--password", dest="password", metavar="PASSWD", help="Password for https")
+
+(opts, args) = parser.parse_args()
+
+config = read_config('~/.swuprc')
+
+DAILY="pc/releases/daily/trunk"
+WEEKLY="pc/releases/weekly/trunk"
+SNAPSHOTS="snapshots/trunk/pc/"
+BASE="https://download.tz.otcshare.org"
+
+if opts.type == "daily":
+ release_url = "%s/%s" %(BASE, DAILY)
+elif opts.type == "weekly":
+ release_url = "%s/%s" %(BASE, WEEKLY)
+else:
+ release_url = "%s/%s" %(BASE, SNAPSHOTS)
+
+credentials = [None, None]
+if opts.username:
+ credentials[0] = opts.username
+elif config.has_option('DEFAULT', 'username'):
+ credentials[0] = config.get('DEFAULT', 'username')
+if opts.password:
+ credentials[1] = opts.password
+elif config.has_option('DEFAULT', 'password'):
+ credentials[1] = config.get('DEFAULT', 'password')
+
+# Initialize cache dir
+CACHE_DIR = "cache"
+if config.has_option('DEFAULT', 'cache-dir'):
+ CACHE_DIR = config.get('DEFAULT', 'cache-dir')
+CACHE_DIR = os.path.abspath(os.path.expanduser(CACHE_DIR))
+packages_files_dir = os.path.join(CACHE_DIR, 'packages-files')
+if not os.path.exists(packages_files_dir):
+ os.makedirs(packages_files_dir)
+
+root = os.getcwd()
+if not opts.patch:
+    print "missing option --patch. You need to point to a patch file (YAML format)"
+ sys.exit(1)
+
+if opts.patchdir:
+ root = opts.patchdir
+
+patch_path = opts.patch
+destination = ""
+if not opts.destdir:
+ destination = root
+else:
+ destination = opts.destdir
+
+# create deltas (primary, deltainfo)
+patch = parse_patch(patch_path)
+patch_id = patch['ID']
+target_dir = "%s/%s" % (root, patch_id)
+
+# Prepare target dir
+if not os.path.exists(target_dir):
+ os.makedirs(target_dir)
+else:
+ print "Cleaning up %s" % target_dir
+ for filename in ['rpms', 'new', 'old']:
+ filepath = os.path.join(target_dir, filename)
+ if os.path.exists(filepath):
+ shutil.rmtree(os.path.join(filepath))
+
+# Get packages
+p1 = get_package_list(opts.image, release_url, opts.old, credentials, target_dir, packages_files_dir)
+p2 = get_package_list(opts.image, release_url, opts.new, credentials, target_dir, packages_files_dir)
+
+pkgs1 = {'%s|%s' % (pkg, attr['arch']) for pkg, attr in p1.iteritems()}
+pkgs2 = {'%s|%s' % (pkg, attr['arch']) for pkg, attr in p2.iteritems()}
+newpkgs = [pkg.split('|')[0] for pkg in pkgs2.difference(pkgs1)]
+
+pkgs1 = {'%s|%s' % (pkg, attr['version']) for pkg, attr in p1.iteritems()}
+pkgs2 = {'%s|%s' % (pkg, attr['version']) for pkg, attr in p2.iteritems()}
+changedpkgs = [pkg.split('|')[0] for pkg in pkgs2.difference(pkgs1) if pkg.split('|')[0] in p1]
+
+cached_pkgs_dir = os.path.join(CACHE_DIR, 'rpms')
+if not os.path.exists(cached_pkgs_dir):
+ os.makedirs(cached_pkgs_dir)
+
+old_pkgs_dir = os.path.join(target_dir, 'old')
+if not os.path.exists(old_pkgs_dir):
+ os.makedirs(old_pkgs_dir)
+new_pkgs_dir = os.path.join(target_dir, 'new')
+if not os.path.exists(new_pkgs_dir):
+ os.makedirs(new_pkgs_dir)
+changed_pkgs_dir = os.path.join(target_dir, 'rpms')
+if not os.path.exists(changed_pkgs_dir):
+ os.makedirs(changed_pkgs_dir)
+
+old_repourl = "%s/%s/repos/pc/x86_64/packages/" % (release_url, opts.old)
+new_repourl = "%s/%s/repos/pc/x86_64/packages/" % (release_url, opts.new)
+
+with open(os.path.join(target_dir, "repourl"), "w") as repourlfile:
+ repourlfile.write("%s\n" % new_repourl)
+
+for p in newpkgs:
+ rpm = "%s-%s.%s.rpm" % (p, p2[p]['version'], p2[p]['arch'])
+ arch = p2[p]['arch']
+ download("%s/%s" % (new_repourl, arch), rpm, credentials, new_pkgs_dir, cached_pkgs_dir)
+
+for p in changedpkgs:
+ rpm = "%s-%s.%s.rpm" % (p, p1[p]['version'], p1[p]['arch'])
+ arch = p1[p]['arch']
+ download("%s/%s" % (old_repourl, arch), rpm, credentials, old_pkgs_dir, cached_pkgs_dir)
+ rpm = "%s-%s.%s.rpm" % (p, p2[p]['version'], p2[p]['arch'])
+ download("%s/%s" % (new_repourl, arch), rpm, credentials, changed_pkgs_dir, cached_pkgs_dir)
+
+os.system("createrepo --deltas --oldpackagedirs=%s %s/%s" % (cached_pkgs_dir, root, patch_id))
+
+# create updateinfo
+create_updateinfo(root, patch)
+
+# update repo
+os.system("modifyrepo %s/updateinfo.xml %s/%s/repodata" % (root, root, patch_id))
+
+zip_checksum = create_update_file(patch_path, target_dir, destination, patch_id)
+
+update_metadata(destination, root, opts.updatesfile, patch, zip_checksum)
-#!/usr/bin/python
+import csv
+import urllib2
+import os
+import re, base64
+import shutil
import yaml
from xml.dom import minidom
import rpm
import glob
-from optparse import OptionParser
import sys, os
import zipfile
import hashlib
-import shutil
import fileinput
+
+def http_get(url, credentials=(None, None)):
+ print "Downloading %s" %url
+ request = urllib2.Request(url)
+ if credentials[0] and credentials[1]:
+ base64string = base64.encodestring('%s:%s' % (credentials[0], credentials[1])).replace('\n', '')
+ request.add_header("Authorization", "Basic %s" % base64string)
+ html_page = urllib2.urlopen(request)
+ return html_page
+
+def download(url, fname, credentials, outdir, cachedir):
+ cached_file = os.path.join(cachedir, fname)
+ if os.path.exists(cached_file):
+ print "File cache hit: %s" % fname
+ else:
+ ret = http_get(os.path.join(url, fname), credentials)
+ cache = open(cached_file, "w")
+ cache.write(ret.read())
+ cache.close()
+ if outdir:
+ dest_file = os.path.join(outdir, fname)
+ if not os.path.exists(dest_file):
+ shutil.copy2(cached_file, dest_file)
+
+def get_package_list(image_name, base_url, build_id, credentials, outdir, cachedir):
+ cache_file = "%s/%s-%s.packages" %(cachedir, image_name, build_id )
+ package_file = None
+ if not os.path.exists(cache_file):
+ image_packages = "%s/%s/images/%s/%s-%s.packages" %(base_url, build_id, image_name, image_name, build_id )
+ #print image_packages
+ package_file = http_get(image_packages, credentials)
+ cache = open(cache_file, "w")
+ cache.write(package_file.read())
+ cache.close()
+ with open(cache_file, "rb") as package_file:
+ packages = {}
+ pkgreader = csv.reader(package_file, delimiter=' ', quotechar='|')
+ for row in pkgreader:
+ pkg = row[0].split(".")
+ if len(row)>2:
+ packages[pkg[0]] = {'scm': row[2], 'version': row[1], 'arch': pkg[1]}
+ else:
+ packages[pkg[0]] = {'scm': None, 'version': row[1], 'arch': pkg[1]}
+ shutil.copy2(cache_file, os.path.join(outdir, "packages"))
+
+ return packages
+
+
+
def get_checksum(fileName, checksum_type="sha256", excludeLine="", includeLine=""):
"""Compute sha256 hash of the specified file"""
m = hashlib.sha256()
pkglist.appendChild(collection)
root.appendChild(pkglist)
-def parse_patch( patch_path):
+def parse_patch(patch_path):
print 'Processing patch file:', patch_path
try:
- stream = file("%s" % ( patch_path), 'r')
+ stream = file("%s" % (patch_path), 'r')
except IOError:
- print "Cannot read file: %s/%s" % ( patch_path)
+        print "Cannot read file: %s" % patch_path
try:
patch = yaml.load(stream)
f.write(updateinfo_xml)
f.close()
-def create_update_file(target_dir, destination, patch_id):
+def create_update_file(patch_path, target_dir, destination, patch_id):
# create zip file
shutil.copyfile(patch_path, "%s/%s" %(target_dir, patch_id))
zip = zipfile.ZipFile("%s/%s.zip" % (destination, patch_id ), 'w', zipfile.ZIP_DEFLATED)
def update_metadata(destination, root, updates_file, patch, zip_checksum):
# creates updates.xml
patch_id = patch['ID']
- up = Updates(cache=opts.updatesfile)
+ up = Updates(cache=updates_file)
up.add_update(patch, "%s.zip" %patch_id, zip_checksum)
# save to file
updates_xml = up.doc.toxml()
for line in fileinput.input("%s/data/updatemd.xml" %destination, inplace=1):
print line.replace("repomd", "updatemd"),
-
-
-parser = OptionParser()
-parser.add_option('-u', '--updateinfo', metavar='TEXT',
- help='cached meta updateinfo file')
-parser.add_option('-U', '--updatesfile', metavar='UPDATES',
- help='master updates.xml file')
-parser.add_option('-O', '--original', metavar='ORIGINAL',
- help='Original and Old package directory')
-
-parser.add_option('-q', '--quiet', action='store_true',
- help='do not show downloading progress')
-parser.add_option('-d', '--destdir', default='.', metavar='DIR',
- help='Directory where to store the updates.')
-parser.add_option('-p', '--patch', metavar='TEXT',
- help='Patch information')
-parser.add_option('-P', '--patchdir', metavar='DIR',
- help='directory with patch files')
-parser.add_option('-t', '--testing', action='store_true',
- help='test updates')
-
-(opts, args) = parser.parse_args()
-
-root = os.getcwd()
-if not opts.patch:
- print "missing options --patch. You need to point to a patch file (YAML format)"
- sys.exit(1)
-
-if opts.patchdir:
- root = opts.patchdir
-
-patch_path = opts.patch
-destination = ""
-if not opts.destdir:
- destination = root
-else:
- destination = opts.destdir
-
-# create deltas (primary, deltainfo)
-patch = parse_patch ( patch_path)
-patch_id = patch['ID']
-target_dir = "%s/%s" % (root, patch_id)
-
-os.system("createrepo --deltas --oldpackagedirs=%s %s/%s" % (opts.original, root, patch_id))
-
-# create updateinfo
-create_updateinfo(root, patch)
-
-# update repo
-os.system("modifyrepo %s/updateinfo.xml %s/%s/repodata" % (root, root, patch_id))
-
-zip_checksum = create_update_file(target_dir, destination, patch_id)
-
-update_metadata(destination, root, opts.updatesfile, patch, zip_checksum)