# Copyright (c) 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.

from __future__ import with_statement

import os, sys, re, glob, shutil, tempfile, hashlib, subprocess
import sqlite3 as sqlite

from xml.etree import cElementTree
xmlparse = cElementTree.parse

# assumed import paths for the project-local modules used below
import msger, runner, rpmmisc
from errors import CreatorError, SquashfsError
from fs_related import *
from rpmmisc import myurlgrab
from proxy import get_proxy_for

def save_ksconf_file(ksconf, release="latest", arch="ia32"):
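    """Substitute @ARCH@ and @BUILD_ID@ placeholders in a kickstart file.

    If the file contains placeholders, the substituted content is written to
    a temporary copy under /tmp and the path of that copy is returned.
    Illustrative use (hypothetical path):
        ksconf = save_ksconf_file("/usr/share/ks/handset.ks", "1.2", "armv7l")
    """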
    if not os.path.exists(ksconf):
        return
    with open(ksconf) as f:
        ksc = f.read()
    if "@ARCH@" in ksc or "@BUILD_ID@" in ksc:
        ksc = ksc.replace("@ARCH@", arch)
        ksc = ksc.replace("@BUILD_ID@", release)
        fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
        os.write(fd, ksc)
        os.close(fd)
        msger.debug('new ks path %s' % ksconf)
    return ksconf

def check_meego_chroot(rootdir):
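    """Sanity-check that rootdir looks like a usable MeeGo/Tizen chroot.

    Raises CreatorError when the release file, the init scripts or a kernel
    image cannot be found under rootdir.
    """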
    if not os.path.exists(rootdir + "/etc/moblin-release") and \
       not os.path.exists(rootdir + "/etc/meego-release"):
        raise CreatorError("Directory %s is not a MeeGo/Tizen chroot env" \
                           % rootdir)

    if not os.path.exists(rootdir + "/etc/inittab") or \
       not os.path.exists(rootdir + "/etc/rc.sysinit"):
        raise CreatorError("Lack of init scripts under %s: /etc/inittab, "
                           "/etc/rc.sysinit" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        raise CreatorError("Failed to find a kernel image (vmlinuz-*) under %s" % rootdir)

def get_image_type(path):
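    """Guess the image type of path.

    A directory is validated as a MeeGo/Tizen chroot; a regular file is
    classified by its extension, by a VirtualBox VDI header probe, or by
    the output of the 'file' command.
    """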
    def _get_extension_name(path):
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        return None

    if os.path.isdir(path):
        check_meego_chroot(path)
        return "fs"  # a plain directory is treated as a "fs" image (assumed value)

    # extension -> image type map (entries assumed from the lookups below)
    maptab = {
        "raw": "raw",
        "vmdk": "vmdk",
        "vdi": "vdi",
        "iso": "livecd",
        "usbimg": "liveusb",
    }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    fd = open(path, "rb")
    file_header = fd.read(1024)
    fd.close()
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"  # literal type names below are assumed
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)

def get_file_size(file):
    """ Return size in MB unit, TODO: rewrite """
    rc, duOutput = runner.runtool(['du', "-s", "-b", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du' on %s" % file)
    size1 = int(duOutput.split()[0])
    rc, duOutput = runner.runtool(['du', "-s", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du' on %s" % file)
    size2 = int(duOutput.split()[0])
    # return the larger of apparent size and on-disk usage
    return max(size1, size2)

def get_filesystem_avail(fs):
    vfstat = os.statvfs(fs)
    return vfstat.f_bavail * vfstat.f_bsize

def convert_image(srcimg, srcfmt, dstimg, dstfmt):
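    """Convert a vmdk or vdi disk image into a raw image.

    Only "raw" is accepted as the destination format; qemu-img handles vmdk
    sources and VBoxManage handles vdi sources.
    """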
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)

def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress file system from squashfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    args = [ unsquashfs, "-d", outdir, squashfsimg ]
    rc = runner.show(args)
    if rc != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)

def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)

def get_temp_reponame(baseurl):
    md5obj = hashlib.md5(baseurl)
    tmpreponame = "%s" % md5obj.hexdigest()
    return tmpreponame

def get_repostrs_from_ks(ks):
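    """Flatten the kickstart repo commands into "key:value,key:value" strings.

    Each repo in ks.handler.repo.repoList becomes one comma-separated string,
    e.g. (illustrative) "name:oss,baseurl:http://example.com/repo,cost:100".
    """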
    kickstart_repos = []
    for repodata in ks.handler.repo.repoList:
        repostr = ""
        if hasattr(repodata, "name") and repodata.name:
            repostr += ",name:" + repodata.name
        if hasattr(repodata, "baseurl") and repodata.baseurl:
            repostr += ",baseurl:" + repodata.baseurl
        if hasattr(repodata, "mirrorlist") and repodata.mirrorlist:
            repostr += ",mirrorlist:" + repodata.mirrorlist
        if hasattr(repodata, "includepkgs") and repodata.includepkgs:
            repostr += ",includepkgs:" + ";".join(repodata.includepkgs)
        if hasattr(repodata, "excludepkgs") and repodata.excludepkgs:
            repostr += ",excludepkgs:" + ";".join(repodata.excludepkgs)
        if hasattr(repodata, "cost") and repodata.cost:
            repostr += ",cost:%d" % repodata.cost
        if hasattr(repodata, "save") and repodata.save:
            repostr += ",save:"
        if hasattr(repodata, "proxy") and repodata.proxy:
            repostr += ",proxy:" + repodata.proxy
        if hasattr(repodata, "proxy_username") and repodata.proxy_username:
            repostr += ",proxyuser:" + repodata.proxy_username
        if hasattr(repodata, "proxy_password") and repodata.proxy_password:
            repostr += ",proxypasswd:" + repodata.proxy_password
        if repostr.find("name:") == -1:
            # no explicit name given, prepend a generated one
            repostr = ",name:%s" % get_temp_reponame(repodata.baseurl) + repostr
        if hasattr(repodata, "debuginfo") and repodata.debuginfo:
            repostr += ",debuginfo:"
        if hasattr(repodata, "source") and repodata.source:
            repostr += ",source:"
        if hasattr(repodata, "gpgkey") and repodata.gpgkey:
            repostr += ",gpgkey:" + repodata.gpgkey
        kickstart_repos.append(repostr[1:])

    return kickstart_repos

def get_uncompressed_data_from_url(url, filename, proxies):
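    """Fetch url to filename and decompress it when it is gzip/bzip2 packed.

    Returns the path of the (possibly decompressed) local file.
    """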
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        filename = filename.replace(suffix, "")
    return filename

def get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename):
    url = str(baseurl + "/" + filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    return get_uncompressed_data_from_url(url, filename_tmp, proxies)

def get_metadata_from_repos(repostrs, cachedir):
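    """Download repomd.xml plus primary/patterns/comps data for each repo.

    repostrs are the strings produced by get_repostrs_from_ks(); the results
    are returned as a list of dicts with keys such as "name", "baseurl",
    "primary", "patterns", "comps", "repokey" and "proxies".
    """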
    my_repo_metadata = []
    for repostr in repostrs:
        reponame = None
        baseurl = None
        proxy = None
        items = repostr.split(",")
        for item in items:
            subitems = item.split(":")
            if subitems[0] == "name":
                reponame = subitems[1]
            if subitems[0] == "baseurl":
                baseurl = item[len("baseurl:"):]
            if subitems[0] == "proxy":
                proxy = item[len("proxy:"):]
            if subitems[0] in ("http", "https", "ftp", "ftps", "file"):
                baseurl = item
        if not proxy:
            proxy = get_proxy_for(baseurl)
        proxies = None
        if proxy:
            proxies = {str(proxy.split(":")[0]): str(proxy)}
        makedirs(cachedir + "/" + reponame)
        url = str(baseurl + "/repodata/repomd.xml")
        filename = str("%s/%s/repomd.xml" % (cachedir, reponame))
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")
        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]
        patterns = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                patterns = elm.find("%slocation" % ns).attrib['href']
                break
        comps = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "group_gz":
                comps = elm.find("%slocation" % ns).attrib['href']
                break
        if not comps:
            for elm in root.getiterator("%sdata" % ns):
                if elm.attrib["type"] == "group":
                    comps = elm.find("%slocation" % ns).attrib['href']
                    break
        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "primary_db":
                primary_type = ".sqlite"
                break
        if not primary_type:
            for elm in root.getiterator("%sdata" % ns):
                if elm.attrib["type"] == "primary":
                    primary_type = ".xml"
                    break
        if not primary_type:
            continue
        primary = elm.find("%slocation" % ns).attrib['href']
        primary = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, primary)
        if patterns:
            patterns = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, patterns)
        if comps:
            comps = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, comps)
        repokey = None
        try:
            repokey = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, "repodata/repomd.xml.key")
        except CreatorError:
            msger.warning("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))
        my_repo_metadata.append({"name":reponame, "baseurl":baseurl, "repomd":repomd, "primary":primary, "cachedir":cachedir, "proxies":proxies, "patterns":patterns, "comps":comps, "repokey":repokey})

    return my_repo_metadata

def get_arch(repometadata):
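    """Collect the binary architectures present in the repos.

    Returns (uniq_arch, archlist): every non-noarch/src arch seen, plus that
    list reduced to the most specific arch(es) according to the
    rpmmisc.archPolicies compatibility chains.
    """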
    archlist = []
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])
            con.close()

    uniq_arch = []
    for i in range(len(archlist)):
        if archlist[i] not in rpmmisc.archPolicies.keys():
            continue
        need_append = True
        j = 0
        while j < len(uniq_arch):
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                # a more specific arch is already recorded
                need_append = False
                break
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                # archlist[i] is more specific: replace or drop the old entry
                if need_append:
                    uniq_arch[j] = archlist[i]
                    need_append = False
                else:
                    uniq_arch.remove(uniq_arch[j])
                    continue
            j += 1
        if need_append:
            uniq_arch.append(archlist[i])

    return uniq_arch, archlist

def get_package(pkg, repometadata, arch = None):
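    """Download the newest build of pkg found in the given repos.

    The primary metadata (XML or sqlite) of every repo is searched; the hit
    with the highest version-release string wins and is fetched into the
    repo cache directory. Returns the local path, or None when not found.
    """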
    ver = ""
    target_repo = None
    pkgpath = None
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if not arch:
                for row in con.execute("select version, release, location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                for row in con.execute("select version, release, location_href from packages where name = \"%s\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()

    if not target_repo:
        return None

    makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
    url = str(target_repo["baseurl"] + "/" + pkgpath)
    filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
    pkg = myurlgrab(url, filename, target_repo["proxies"])
    return pkg

def get_source_name(pkg, repometadata):
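    """Map a binary rpm file name to the name of its source package.

    Illustrative: "foo-1.0-1.i586.rpm" maps to the name of foo's .src.rpm as
    recorded in the repo metadata (sourcerpm/rpm_sourcerpm), or None.
    """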
    def get_bin_name(pkg):
        m = re.match(r"(.*)-(.*)-(.*)\.(.*)\.rpm", pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        m = re.match(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None
    pkgpath = None
    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt is not None:
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    ver = tmpver
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()

    if not target_repo or not pkgpath:
        return None
    return get_src_name(pkgpath)

def get_release_no(repometadata, distro="meego"):
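    """Read the release number out of the <distro>-release package.

    The package is downloaded, unpacked with rpm2cpio|cpio into a temporary
    directory, and the third word of /etc/<distro>-release is returned.
    """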
    cpio = find_binary_path("cpio")
    rpm2cpio = find_binary_path("rpm2cpio")
    release_pkg = get_package("%s-release" % distro, repometadata)
    tmpdir = mkdtemp()
    oldcwd = os.getcwd()
    os.chdir(tmpdir)
    p1 = subprocess.Popen([rpm2cpio, release_pkg], stdout = subprocess.PIPE)
    p2 = subprocess.Popen([cpio, "-idv"], stdin = p1.stdout, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
    p2.communicate()
    f = open("%s/etc/%s-release" % (tmpdir, distro), "r")
    content = f.read()
    f.close()
    os.chdir(oldcwd)
    shutil.rmtree(tmpdir, ignore_errors = True)
    return content.split(" ")[2]

def get_kickstarts_from_repos(repometadata):
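    """Look for an "image-config" entry in each repo and fetch the kickstart
    files it describes.

    Returns a list of {"filename": ..., "description": ...} dicts.
    """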
    kickstarts = []
    for repo in repometadata:
        try:
            root = xmlparse(repo["repomd"])
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "image-config":
                break
        if elm.attrib["type"] != "image-config":
            continue

        location = elm.find("%slocation" % ns)
        href = location.attrib["href"]
        # keep a compression suffix (if any) so the download helper can unpack it
        suffix = ""
        if href.endswith(".gz"):
            suffix = ".gz"
        elif href.endswith(".bz2"):
            suffix = ".bz2"
        image_config = str(repo["baseurl"] + "/" + href)
        filename = str("%s/%s/image-config.xml%s" % (repo["cachedir"], repo["name"], suffix))
        image_config = get_uncompressed_data_from_url(image_config, filename, repo["proxies"])

        try:
            root = xmlparse(image_config)
        except SyntaxError:
            raise CreatorError("image-config.xml syntax error.")

        for elm in root.getiterator("config"):
            path = elm.find("path").text
            path = path.replace("images-config", "image-config")
            description = elm.find("description").text
            makedirs(os.path.dirname("%s/%s/%s" % (repo["cachedir"], repo["name"], path)))
            if "http" not in path:
                url = str(repo["baseurl"] + "/" + path)
                filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], path))
                path = myurlgrab(url, filename, repo["proxies"])
            kickstarts.append({"filename":path, "description":description})

    return kickstarts

def select_ks(ksfiles):
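    """Interactively pick one kickstart file from the list.

    Prints a numbered menu, reads the choice from stdin ('q' aborts) and
    returns the chosen file name.
    """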
    msger.info("Available kickstart files:")
    i = 0
    for ks in ksfiles:
        i += 1
        msger.raw("\t%d. %s (%s)" % (i, ks["description"], os.path.basename(ks["filename"])))
    while True:
        choice = raw_input("Please input your choice and press ENTER. [1..%d] ? " % i)
        if choice.lower() == "q":
            sys.exit(-1)
        if choice.isdigit() and 1 <= int(choice) <= i:
            choice = int(choice)
            break
    return ksfiles[choice-1]["filename"]

def get_pkglist_in_patterns(group, patterns):
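    """Return the package names required by a pattern group.

    patterns is a (possibly already uncompressed) patterns.xml file; group is
    matched against each pattern's <name> or <summary>.
    """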
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    found = False
    for elm in list(root.getroot()):
        ns = elm.tag
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        if name.text == group or summary.text == group:
            found = True
            break
    if not found:
        return pkglist

    for requires in list(elm):
        if requires.tag.endswith("requires"):
            break
    if not requires.tag.endswith("requires"):
        return pkglist

    for pkg in list(requires):
        pkgname = pkg.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)
    return pkglist

def get_pkglist_in_comps(group, comps):
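    """Return the package names of a comps.xml group.

    group is matched against each group's <id> or <name> element.
    """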
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)
    found = False
    for elm in root.getiterator("group"):
        id = elm.find("id")
        name = elm.find("name")
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
            found = True
            break
    if not found:
        return pkglist
    for require in elm.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
            if pkgname not in pkglist:
                pkglist.append(pkgname)
    return pkglist

def is_statically_linked(binary):
    return ", statically linked, " in runner.outs(['file', binary])

def setup_qemu_emulator(rootdir, arch):
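    """Prepare an ARM chroot so its binaries can run on the build host via qemu.

    Mounts binfmt_misc when needed, copies a statically linked qemu-arm into
    rootdir, and (re)registers the ARM binfmt handler. Returns the emulator
    path that was registered.
    """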
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")

    # qemu emulator version check (avoid shadowing the 'arch' parameter)
    armv7_list = [a for a in rpmmisc.archPolicies.keys() if a.startswith('armv7')]
    if arch in armv7_list:  # need qemu (>= 0.13.0)
        qemuout = runner.outs([qemu_emulator, "-h"])
        m = re.search(r"version\s*([.\d]+)", qemuout)
        if m:
            qemu_version = m.group(1)
            if qemu_version < "0.13":
                raise CreatorError("Requires %s version >= 0.13 for %s" % (qemu_emulator, arch))
        else:
            msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, it would block the qemu emulator from running
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    node = "/proc/sys/fs/binfmt_misc/arm"
    if is_statically_linked(qemu_emulator) and os.path.exists(node):
        return qemu_emulator

    # unregister it if it has been registered and is a dynamically-linked executable
    if not is_statically_linked(qemu_emulator) and os.path.exists(node):
        qemu_unregister_string = "-1\n"
        fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
        fd.write(qemu_unregister_string)
        fd.close()

    # register qemu emulator for interpreting executables of other architectures
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
        fd.write(qemu_arm_string)
        fd.close()

    return qemu_emulator

def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
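    """Copy the source rpms of pkgs into instroot/usr/src/SRPMS.

    Cached .src.rpm files under cachedir are reused; anything missing is
    downloaded from the "-source" repos. Returns the list of source rpms
    placed in the image, or None when no source repo is configured.
    """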
    def get_source_repometadata(repometadata):
        src_repometadata = []
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        m = re.match(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)
    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)
    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages gotten from cache" % cached_count)

    return src_pkgs

def strip_end(text, suffix):
    if not text.endswith(suffix):
        return text
    return text[:-len(suffix)]