# Copyright (c) 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.

from __future__ import with_statement

import os
import sys
import re
import glob
import shutil
import tempfile
import subprocess
import hashlib
import sqlite3 as sqlite

from xml.etree import cElementTree
xmlparse = cElementTree.parse

# mic helper modules (flat import paths, matching the fs_related/proxy imports
# below); find_binary_path, makedirs, myurlgrab and the error classes used in
# this file are expected to come from these helpers.
from errors import *
from fs_related import *
from proxy import get_proxy_for

import msger
import runner
import rpmmisc

def save_ksconf_file(ksconf, release="latest", arch="ia32"):
    if not os.path.exists(ksconf):
        return None

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" in ksc or "@BUILD_ID@" in ksc:
        ksc = ksc.replace("@ARCH@", arch)
        ksc = ksc.replace("@BUILD_ID@", release)
        fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
        os.write(fd, ksc)
        os.close(fd)
        msger.debug('new ks path %s' % ksconf)

    return ksconf

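# Illustrative usage (not part of the original module; the path and release
# string below are examples only):
#
#   new_ks = save_ksconf_file("/path/to/handset.ks",
#                             release="1.0_20120515.1", arch="armv7l")
#   # -> a temporary copy under /tmp/ with @ARCH@/@BUILD_ID@ substituted,
#   #    or the original path if the file contains no placeholders
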
def get_image_type(path):

    def _get_extension_name(path):
        match = re.search("(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    def _ismeego(rootdir):
        if (os.path.exists(rootdir + "/etc/moblin-release") \
           or os.path.exists(rootdir + "/etc/meego-release")) \
           and os.path.exists(rootdir + "/etc/inittab") \
           and os.path.exists(rootdir + "/etc/rc.sysinit") \
           and glob.glob(rootdir + "/boot/vmlinuz-*"):
            return True
        else:
            return False

    if os.path.isdir(path):
        if _ismeego(path):
            return "fs"
        raise CreatorError("Directory %s is not a MeeGo/Tizen chroot env" % path)

    # extension -> image type lookup (reconstructed table; adjust the values
    # to the image types your mic build supports)
    maptab = {"raw": "raw",
              "vmdk": "vmdk",
              "vdi": "vdi",
              "iso": "livecd",
              "usbimg": "liveusb"}

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    fd = open(path, "rb")
    file_header = fd.read(1024)
    fd.close()
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)

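# Illustrative calls (not part of the original module; paths are examples):
#
#   get_image_type("/srv/chroots/tizen-armv7l")  # -> "fs" for a MeeGo/Tizen chroot
#   get_image_type("appliance.vmdk")             # -> "vmdk", matched by extension
#   get_image_type("mystery.img")                # falls back to sniffing `file` output
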
def get_file_size(file):
    """ Return size in MB unit, TODO: rewrite """
    rc, duOutput = runner.runtool(['du', "-s", "-b", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du -s -b -B 1M %s'" % file)
    size1 = int(duOutput.split()[0])

    rc, duOutput = runner.runtool(['du', "-s", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du -s -B 1M %s'" % file)
    size2 = int(duOutput.split()[0])

    return max(size1, size2)

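# Note on the two `du` runs above: the first ("-s -b -B 1M") reports the
# apparent size in MB, the second ("-s -B 1M") the actual on-disk usage in MB;
# the larger of the two is returned so sparse files are not under-sized.
# Illustrative call (path is an example only):
#
#   get_file_size("/var/tmp/mic-tmp-abc123/meego.img")   # -> e.g. 1024
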
def get_filesystem_avail(fs):
    vfstat = os.statvfs(fs)
    return vfstat.f_bavail * vfstat.f_bsize

def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    # convert disk image format; only conversion to raw is supported
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)

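# Illustrative usage (not part of the original module; file names are examples):
#
#   convert_image("appliance.vmdk", "vmdk", "appliance.raw", "raw")
#   # runs: qemu-img convert -f vmdk appliance.vmdk -O raw appliance.raw
#
#   convert_image("appliance.vdi", "vdi", "appliance.raw", "raw")
#   # runs: VBoxManage internalcommands converttoraw appliance.vdi appliance.raw
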
def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress file system from squashfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    args = [unsquashfs, "-d", outdir, squashfsimg]
    rc = runner.show(args)
    if rc != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)

def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)

def get_temp_reponame(baseurl):
    md5obj = hashlib.md5(baseurl)
    tmpreponame = "%s" % md5obj.hexdigest()
    return tmpreponame

def get_repostrs_from_ks(ks):
    kickstart_repos = []
    for repodata in ks.handler.repo.repoList:
        repostr = ""
        if hasattr(repodata, "name") and repodata.name:
            repostr += ",name:" + repodata.name
        if hasattr(repodata, "baseurl") and repodata.baseurl:
            repostr += ",baseurl:" + repodata.baseurl
        if hasattr(repodata, "mirrorlist") and repodata.mirrorlist:
            repostr += ",mirrorlist:" + repodata.mirrorlist
        if hasattr(repodata, "includepkgs") and repodata.includepkgs:
            repostr += ",includepkgs:" + ";".join(repodata.includepkgs)
        if hasattr(repodata, "excludepkgs") and repodata.excludepkgs:
            repostr += ",excludepkgs:" + ";".join(repodata.excludepkgs)
        if hasattr(repodata, "cost") and repodata.cost:
            repostr += ",cost:%d" % repodata.cost
        if hasattr(repodata, "save") and repodata.save:
            repostr += ",save:"
        if hasattr(repodata, "proxy") and repodata.proxy:
            repostr += ",proxy:" + repodata.proxy
        if hasattr(repodata, "proxy_username") and repodata.proxy_username:
            repostr += ",proxyuser:" + repodata.proxy_username
        if hasattr(repodata, "proxy_password") and repodata.proxy_password:
            repostr += ",proxypasswd:" + repodata.proxy_password
        if repostr.find("name:") == -1:
            # make sure every repo record carries a name; keep the other fields
            repostr = ",name:%s" % get_temp_reponame(repodata.baseurl) + repostr
        if hasattr(repodata, "debuginfo") and repodata.debuginfo:
            repostr += ",debuginfo:"
        if hasattr(repodata, "source") and repodata.source:
            repostr += ",source:"
        if hasattr(repodata, "gpgkey") and repodata.gpgkey:
            repostr += ",gpgkey:" + repodata.gpgkey
        kickstart_repos.append(repostr[1:])
    return kickstart_repos

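# Each string built above is a comma-separated key:value record, one per
# kickstart 'repo' line. Illustrative result (name and URLs are examples):
#
#   'name:tizen-base,baseurl:http://download.example.com/repos/base/ia32/packages,save:,proxy:http://proxy.example.com:8080'
#
# get_metadata_from_repos() below parses these records back into fields.
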
def get_uncompressed_data_from_url(url, filename, proxies):
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        filename = filename.replace(suffix, "")
    return filename

def get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename):
    url = str(baseurl + "/" + filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    return get_uncompressed_data_from_url(url, filename_tmp, proxies)

def get_metadata_from_repos(repostrs, cachedir):
    my_repo_metadata = []
    for repostr in repostrs:
        reponame = None
        baseurl = None
        proxy = None
        items = repostr.split(",")
        for item in items:
            subitems = item.split(":")
            if subitems[0] == "name":
                reponame = subitems[1]
            if subitems[0] == "baseurl":
                baseurl = item[8:]
            if subitems[0] == "proxy":
                proxy = item[6:]
            if subitems[0] in ("http", "https", "ftp", "ftps", "file"):
                baseurl = item
        if not proxy:
            proxy = get_proxy_for(baseurl)
        proxies = None
        if proxy:
            proxies = {str(proxy.split(":")[0]): str(proxy)}
        makedirs(cachedir + "/" + reponame)
        url = str(baseurl + "/repodata/repomd.xml")
        filename = str("%s/%s/repomd.xml" % (cachedir, reponame))
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        patterns = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                patterns = elm.find("%slocation" % ns).attrib['href']
                break

        comps = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "group_gz":
                comps = elm.find("%slocation" % ns).attrib['href']
                break
        if not comps:
            for elm in root.getiterator("%sdata" % ns):
                if elm.attrib["type"] == "group":
                    comps = elm.find("%slocation" % ns).attrib['href']
                    break

        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "primary_db":
                primary_type = ".sqlite"
                break
        if not primary_type:
            for elm in root.getiterator("%sdata" % ns):
                if elm.attrib["type"] == "primary":
                    primary_type = ".xml"
                    break
        if not primary_type:
            continue

        primary = elm.find("%slocation" % ns).attrib['href']
        primary = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, primary)

        if patterns:
            patterns = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, patterns)
        if comps:
            comps = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, comps)

        try:
            repokey = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.warning("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name": reponame, "baseurl": baseurl, "repomd": repomd,
                                 "primary": primary, "cachedir": cachedir, "proxies": proxies,
                                 "patterns": patterns, "comps": comps, "repokey": repokey})

    return my_repo_metadata

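# Each entry appended above is a plain dict describing one repo. Illustrative
# shape (all values are examples only):
#
#   {"name": "tizen-base",
#    "baseurl": "http://download.example.com/repos/base/ia32/packages",
#    "repomd": "/var/tmp/mic-cache/tizen-base/repomd.xml",
#    "primary": "/var/tmp/mic-cache/tizen-base/primary.sqlite",
#    "cachedir": "/var/tmp/mic-cache", "proxies": None,
#    "patterns": None, "comps": None, "repokey": None}
#
# get_arch(), get_package() and the other helpers below consume this list.
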
def get_arch(repometadata):
    archlist = []
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])
            con.close()

    # collapse the list using rpmmisc.archPolicies: drop any arch that is
    # already covered by another arch's compatibility list
    uniq_arch = []
    for i in range(len(archlist)):
        if archlist[i] not in rpmmisc.archPolicies.keys():
            continue
        need_append = True
        j = 0
        while j < len(uniq_arch):
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                need_append = False
                break
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                if need_append:
                    uniq_arch[j] = archlist[i]
                    need_append = False
                else:
                    uniq_arch.remove(uniq_arch[j])
                    continue
            j += 1
        if need_append:
            uniq_arch.append(archlist[i])

    return uniq_arch

def get_package(pkg, repometadata, arch = None):
    ver = ""
    target_repo = None
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if not arch:
                for row in con.execute("select version, release, location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                # an explicit arch was requested (e.g. 'src'), so filter on it
                for row in con.execute("select version, release, location_href from packages where name = \"%s\" and arch = \"%s\"" % (pkg, arch)):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()

    if target_repo:
        makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
        url = str(target_repo["baseurl"] + "/" + pkgpath)
        filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        pkg = myurlgrab(url, filename, target_repo["proxies"])
        return pkg
    else:
        return None

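# Illustrative usage (not part of the original module; the package name and
# cache directory are examples):
#
#   repometadata = get_metadata_from_repos(repostrs, "/var/tmp/mic-cache")
#   rpm_path = get_package("rpm", repometadata)
#   # -> local path of the downloaded rpm under <cachedir>/<name>/packages/,
#   #    or None if no configured repo provides the package
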
def get_source_name(pkg, repometadata):

    def get_bin_name(pkg):
        m = re.match("(.*)-(.*)-(.*)\.(.*)\.rpm", pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None
    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt:
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    ver = tmpver
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()

    if target_repo:
        return get_src_name(pkgpath)
    return None

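# How the helper regexes above split rpm file names (examples only):
#
#   get_bin_name("rpm-4.9.1-2.3.armv7l.rpm")   # -> "rpm"
#   get_src_name("rpm-4.9.1-2.3.src.rpm")      # -> "rpm"
#
# get_source_name() therefore maps a binary rpm file name to the name of the
# source package recorded in the repo's sourcerpm metadata.
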
def get_release_no(repometadata, distro="meego"):
    cpio = find_binary_path("cpio")
    rpm2cpio = find_binary_path("rpm2cpio")
    release_pkg = get_package("%s-release" % distro, repometadata)
    if release_pkg:
        tmpdir = mkdtemp()
        # extract the release package and read /etc/<distro>-release from it
        p1 = subprocess.Popen([rpm2cpio, release_pkg], stdout = subprocess.PIPE)
        p2 = subprocess.Popen([cpio, "-idv"], stdin = p1.stdout, cwd = tmpdir,
                              stdout = subprocess.PIPE, stderr = subprocess.PIPE)
        p2.communicate()
        f = open("%s/etc/%s-release" % (tmpdir, distro), "r")
        content = f.read()
        f.close()
        shutil.rmtree(tmpdir, ignore_errors = True)
        return content.split(" ")[2]
    return None

def get_kickstarts_from_repos(repometadata):
    kickstarts = []
    for repo in repometadata:
        try:
            root = xmlparse(repo["repomd"])
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "image-config":
                break

        if elm.attrib["type"] != "image-config":
            continue

        location = elm.find("%slocation" % ns)
        href = location.attrib["href"]
        image_config = str(repo["baseurl"] + "/" + href)
        # keep the compression suffix (e.g. ".gz") of the remote file so the
        # uncompress helper can strip it again after downloading
        suffix = os.path.splitext(href)[1]
        if suffix not in (".gz", ".bz2"):
            suffix = ""
        filename = str("%s/%s/image-config.xml%s" % (repo["cachedir"], repo["name"], suffix))
        image_config = get_uncompressed_data_from_url(image_config, filename, repo["proxies"])

        try:
            root = xmlparse(image_config)
        except SyntaxError:
            raise CreatorError("image-config.xml syntax error.")

        for elm in root.getiterator("config"):
            path = elm.find("path").text
            path = path.replace("images-config", "image-config")
            description = elm.find("description").text
            makedirs(os.path.dirname("%s/%s/%s" % (repo["cachedir"], repo["name"], path)))
            if "http" not in path:
                url = str(repo["baseurl"] + "/" + path)
                filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], path))
                path = myurlgrab(url, filename, repo["proxies"])
            kickstarts.append({"filename": path, "description": description})

    return kickstarts

def select_ks(ksfiles):
    msger.info("Available kickstart files:")
    i = 0
    for ks in ksfiles:
        i += 1
        msger.raw("\t%d. %s (%s)" % (i, ks["description"], os.path.basename(ks["filename"])))

    while True:
        choice = raw_input("Please input your choice and press ENTER. [1..%d] ? " % i)
        if choice.lower() == "q":
            sys.exit(0)
        if choice.isdigit() and 1 <= int(choice) <= i:
            return ksfiles[int(choice)-1]["filename"]

def get_pkglist_in_patterns(group, patterns):
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    for elm in list(root.getroot()):
        ns = elm.tag
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        if name.text == group or summary.text == group:
            # collect the package names listed in the pattern's requires entries
            for requires in list(elm):
                if not requires.tag.endswith("requires"):
                    continue
                for pkg in list(requires):
                    pkgname = pkg.attrib["name"]
                    if pkgname not in pkglist:
                        pkglist.append(pkgname)
            break

    return pkglist

def get_pkglist_in_comps(group, comps):
    found = False
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    for elm in root.getiterator("group"):
        id = elm.find("id")
        name = elm.find("name")
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
            found = True
            break
    if not found:
        return pkglist

    for require in elm.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
            if pkgname not in pkglist:
                pkglist.append(pkgname)

    return pkglist

def is_statically_linked(binary):
    return ", statically linked, " in runner.outs(['file', binary])

def setup_qemu_emulator(rootdir, arch):
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case: we can't use find_binary_path here,
    # because the qemu emulator must be a statically-linked executable
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")

    # qemu emulator version check; use a distinct loop variable so the 'arch'
    # argument is not shadowed
    armv7_list = [a for a in rpmmisc.archPolicies.keys() if a.startswith('armv7')]
    if arch in armv7_list:  # need qemu (>=0.13.0)
        qemuout = runner.outs([qemu_emulator, "-h"])
        m = re.search("version\s*([.\d]+)", qemuout)
        if m:
            qemu_version = m.group(1)
            # compare version components numerically, not as strings
            if [int(x) for x in qemu_version.split('.') if x.isdigit()] < [0, 13]:
                raise CreatorError("Requires %s version >=0.13 for %s" % (qemu_emulator, arch))
        else:
            msger.warning("Can't get version info of %s, please make sure it is 0.13.0 or newer" % qemu_emulator)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable SELinux, otherwise it may block the qemu emulator from running
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    node = "/proc/sys/fs/binfmt_misc/arm"
    if is_statically_linked(qemu_emulator) and os.path.exists(node):
        return qemu_emulator

    # unregister it if it has been registered and is a dynamically-linked executable
    if not is_statically_linked(qemu_emulator) and os.path.exists(node):
        qemu_unregister_string = "-1\n"
        fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
        fd.write(qemu_unregister_string)
        fd.close()

    # register the qemu emulator so the kernel can run ARM executables
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
        fd.write(qemu_arm_string)
        fd.close()

    return qemu_emulator

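# Usage note (illustrative, not part of the original module): an image
# creator would typically call
#
#   qemu_emulator = setup_qemu_emulator("/var/tmp/mic-bootstrap-root", "armv7l")
#
# after which ARM ELF binaries inside the chroot are run transparently through
# the statically-linked qemu-arm interpreter: the binfmt_misc magic registered
# above matches ELF files whose e_machine field is 0x28 (EM_ARM).
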
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):

    def get_source_repometadata(repometadata):
        src_repometadata = []
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)
    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages taken from the local cache" % cached_count)

    return src_pkgs

def strip_end(text, suffix):
    if not text.endswith(suffix):
        return text
    return text[:-len(suffix)]

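# Examples (illustrative):
#
#   strip_end("comps.xml.gz", ".gz")   # -> "comps.xml"
#   strip_end("comps.xml", ".gz")      # -> "comps.xml" (suffix absent, unchanged)
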