# misc.py : miscellaneous utilities
#
# Copyright 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Module imports and setup.
# NOTE(review): this dump is missing several import lines (os, re, glob,
# shutil, subprocess, tempfile, hashlib, msger, runner, myurlgrab and
# CreatorError are all used below but their imports are not visible here)
# -- confirm against the full file.
28 import sqlite3 as sqlite
33 from xml.etree import cElementTree
# Short alias used throughout this module to parse repomd/comps/patterns XML.
36 xmlparse = cElementTree.parse
# Project-local helpers: find_binary_path, makedirs, SquashfsError, etc.
39 from fs_related import *
40 from proxy import get_proxy_for
# Classify the image/rootfs at `path` and map it to an internal image-type
# string via `maptab`.  Directories are probed for a MeeGo/Moblin rootfs
# layout; files are classified first by extension, then by content sniffing
# (VDI text magic, then file(1) output matched against ISO/USB/raw/vmdk/ext3
# patterns).  NOTE(review): this dump is incomplete -- the `maptab`
# definition, several return statements, and the fd open around the header
# read are among the missing original lines; do not edit logic here without
# the full file.
45 def get_image_type(path):
# Helper: return the file-name extension of `path` (text after the last dot).
47     def _get_extension_name(path):
48         match = re.search("(?<=\.)\w+$", path)
# Heuristic: a directory counts as a MeeGo/Moblin rootfs when it has a
# release file, inittab, rc.sysinit and at least one installed kernel.
54     def _ismeego(rootdir):
55         if (os.path.exists(rootdir + "/etc/moblin-release") \
56            or os.path.exists(rootdir + "/etc/meego-release")) \
57            and os.path.exists(rootdir + "/etc/inittab") \
58            and os.path.exists(rootdir + "/etc/rc.sysinit") \
59            and glob.glob(rootdir + "/boot/vmlinuz-*"):
# Directory input: presumably returns the unpacked-rootfs type (missing lines).
64     if os.path.isdir(path):
# Fast path: trust a known file extension before sniffing file content.
77     extension = _get_extension_name(path)
78     if extension in maptab:
79         return maptab[extension]
# Content sniffing: VirtualBox VDI images carry this text magic at offset 0.
# NOTE(review): `fd` is opened (and presumably closed) in missing lines.
82     file_header = fd.read(1024)
84     vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
85     if file_header[0:len(vdi_flag)] == vdi_flag:
# Fall back to file(1).  Order matters: the more specific usbimg pattern
# ("active" partition) must be tested before the generic raw boot-sector one.
88     output = runner.outs(['file', path])
89     isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
90     usbimgptn = re.compile(r".*x86 boot sector.*active.*")
91     rawptn = re.compile(r".*x86 boot sector.*")
92     vmdkptn = re.compile(r".*VMware. disk image.*")
93     ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
94     if isoptn.match(output):
96     elif usbimgptn.match(output):
97         return maptab["usbimg"]
98     elif rawptn.match(output):
100     elif vmdkptn.match(output):
101         return maptab["vmdk"]
102     elif ext3fsimgptn.match(output):
# Return the size of `file` in MB, measured with du.  The first du call
# (-b) measures apparent size, the second measures on-disk usage; the
# missing tail of this function presumably compares/returns them.
# NOTE(review): the rc-check lines are missing here, and the visible error
# path references an undefined name `du` (should be the command string) --
# a latent NameError to fix once the full file is available.
107 def get_file_size(file):
108     """Return size in MB unit"""
109     rc, duOutput = runner.runtool(['du', "-s", "-b", "-B", "1M", file])
111         raise CreatorError("Failed to run %s" % du)
113     size1 = int(duOutput.split()[0])
114     rc, duOutput = runner.runtool(['du', "-s", "-B", "1M", file])
116         raise CreatorError("Failed to run %s" % du)
118     size2 = int(duOutput.split()[0])
def get_filesystem_avail(fs):
    """Return the free space, in bytes, available to an unprivileged user
    on the filesystem containing `fs`.

    Computed as statvfs f_bavail (blocks available to non-root) times
    f_bsize, matching the original implementation.
    """
    vfstat = os.statvfs(fs)
    return vfstat.f_bavail * vfstat.f_bsize
# Convert a virtual-machine disk image (`srcfmt` "vmdk" or "vdi") into
# `dstfmt` at `dstimg`, using qemu-img or VBoxManage respectively.
# NOTE(review): this dump is missing the dstfmt guard (lines 129-130), the
# `if srcfmt == "vmdk":` line, the `else:` before line 140, and the rc
# check before the success/failure branches.  The error string "soure" is
# a typo in a runtime message -- left untouched here.
128 def convert_image(srcimg, srcfmt, dstimg, dstfmt):
131         raise CreatorError("Invalid destination image format: %s" % dstfmt)
132     msger.debug("converting %s image to %s" % (srcimg, dstimg))
134         path = find_binary_path("qemu-img")
135         argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
136     elif srcfmt == "vdi":
137         path = find_binary_path("VBoxManage")
# VBoxManage can only emit raw; dstfmt is presumably validated to "raw"
# in the missing guard above -- TODO confirm.
138         argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
140         raise CreatorError("Invalid soure image format: %s" % srcfmt)
142     rc = runner.show(argv)
144         msger.debug("convert successful")
146         raise CreatorError("Unable to convert disk to %s" % dstfmt)
def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress the file system from a squashfs image into `outdir`.

    Raises SquashfsError when unsquashfs exits non-zero.
    NOTE(review): the rc guard (original line 153) was missing from the
    dump and has been reconstructed as `if rc != 0:` -- confirm against
    the full file.
    """
    unsquashfs = find_binary_path("unsquashfs")
    args = [unsquashfs, "-d", outdir, squashfsimg]
    rc = runner.show(args)
    if rc != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)
def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """Create and return a unique temporary directory under `dir`.

    NOTE(review): one original line (157) is missing from the dump; it is
    almost certainly `makedirs(dir)` (the project helper that tolerates an
    existing directory), since tempfile.mkdtemp fails if `dir` does not
    exist -- confirm against the full file.
    """
    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)
def get_temp_reponame(baseurl):
    """Derive a stable, unique repo name from `baseurl` (its md5 hex digest).

    Used when a kickstart repo has no explicit name.
    NOTE(review): the trailing `return tmpreponame` (original line 163)
    was missing from the dump and has been restored.
    """
    md5obj = hashlib.md5(baseurl)
    tmpreponame = "%s" % md5obj.hexdigest()
    return tmpreponame
def get_repostrs_from_ks(ks):
    """Serialize the repo commands of a parsed kickstart into a list of
    "key:value,key:value" strings (one string per repo).

    NOTE(review): three original lines were missing from the dump and have
    been reconstructed: the `kickstart_repos = []` init, the per-repo
    `repostr = ""` init, and the `,save:` flag append (by analogy with the
    debuginfo/source flags below) -- confirm against the full file.
    """
    kickstart_repos = []
    for repodata in ks.handler.repo.repoList:
        repostr = ""
        if hasattr(repodata, "name") and repodata.name:
            repostr += ",name:" + repodata.name
        if hasattr(repodata, "baseurl") and repodata.baseurl:
            repostr += ",baseurl:" + repodata.baseurl
        if hasattr(repodata, "mirrorlist") and repodata.mirrorlist:
            repostr += ",mirrorlist:" + repodata.mirrorlist
        if hasattr(repodata, "includepkgs") and repodata.includepkgs:
            repostr += ",includepkgs:" + ";".join(repodata.includepkgs)
        if hasattr(repodata, "excludepkgs") and repodata.excludepkgs:
            repostr += ",excludepkgs:" + ";".join(repodata.excludepkgs)
        if hasattr(repodata, "cost") and repodata.cost:
            repostr += ",cost:%d" % repodata.cost
        if hasattr(repodata, "save") and repodata.save:
            repostr += ",save:"
        if hasattr(repodata, "proxy") and repodata.proxy:
            repostr += ",proxy:" + repodata.proxy
        # NOTE(review): hasattr checks "proxyuser"/"proxypasswd" but the
        # reads use proxy_username/proxy_password -- if only one of each
        # pair exists this raises AttributeError; preserved as-is.
        if hasattr(repodata, "proxyuser") and repodata.proxy_username:
            repostr += ",proxyuser:" + repodata.proxy_username
        if hasattr(repodata, "proxypasswd") and repodata.proxy_password:
            repostr += ",proxypasswd:" + repodata.proxy_password
        # NOTE(review): when no name was given this REPLACES repostr,
        # discarding everything accumulated above -- looks like a bug in
        # the original, preserved verbatim pending confirmation.
        if repostr.find("name:") == -1:
            repostr = ",name:%s" % get_temp_reponame(repodata.baseurl)
        # Flag-style keys carry no value.
        if hasattr(repodata, "debuginfo") and repodata.debuginfo:
            repostr += ",debuginfo:"
        if hasattr(repodata, "source") and repodata.source:
            repostr += ",source:"
        if hasattr(repodata, "gpgkey") and repodata.gpgkey:
            repostr += ",gpgkey:" + repodata.gpgkey
        # Drop the leading comma.
        kickstart_repos.append(repostr[1:])
    return kickstart_repos
def get_uncompressed_data_from_url(url, filename, proxies):
    """Download `url` to `filename` and, if it is gzip/bzip2 compressed,
    uncompress it in place; return the path of the uncompressed file.

    NOTE(review): the suffix bookkeeping lines were missing from the dump
    and were reconstructed from the surviving `filename.replace(suffix, "")`
    line -- confirm against the full file.
    """
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        gunzip = find_binary_path('gunzip')
        runner.show([gunzip, "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        bunzip2 = find_binary_path('bunzip2')
        runner.show([bunzip2, "-f", filename])
    if suffix:
        # gunzip/bunzip2 -f strip the suffix from the on-disk name.
        filename = filename.replace(suffix, "")
    return filename
def get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename):
    """Fetch one repo metadata file (`filename`, relative to `baseurl`)
    into the per-repo cache directory, uncompressing it if needed, and
    return the local path of the (uncompressed) file.
    """
    url = str(baseurl + "/" + filename)
    # Cache layout: <cachedir>/<reponame>/<basename-of-metadata-file>
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    return get_uncompressed_data_from_url(url, filename_tmp, proxies)
# For each "key:value,..." repo string, download repomd.xml, locate the
# patterns / comps (group) / primary metadata and the repo signing key,
# download them into the cache, and return a list of per-repo dicts.
# NOTE(review): this dump is heavily incomplete -- per-repo variable
# initializations (reponame/baseurl/proxy/patterns/comps/primary/repokey),
# the try/except around xmlparse and the key download, `break` statements,
# and the `else` branches are all in missing lines.  Code left verbatim.
220 def get_metadata_from_repos(repostrs, cachedir):
221     my_repo_metadata = []
222     for repostr in repostrs:
# Parse the serialized repo string produced by get_repostrs_from_ks().
226         items = repostr.split(",")
228             subitems = item.split(":")
229             if subitems[0] == "name":
230                 reponame = subitems[1]
231             if subitems[0] == "baseurl":
233             if subitems[0] == "proxy":
# A URL-scheme key means the split consumed part of a URL value --
# presumably reassembled in the missing lines; TODO confirm.
235             if subitems[0] in ("http", "https", "ftp", "ftps", "file"):
238             proxy = get_proxy_for(baseurl)
# Proxy dict keyed by URL scheme, as urlgrabber/urllib expect.
241             proxies = {str(proxy.split(":")[0]):str(proxy)}
242         makedirs(cachedir + "/" + reponame)
243         url = str(baseurl + "/repodata/repomd.xml")
244         filename = str("%s/%s/repomd.xml" % (cachedir, reponame))
245         repomd = myurlgrab(url, filename, proxies)
247             root = xmlparse(repomd)
249             raise CreatorError("repomd.xml syntax error.")
# Extract the "{namespace}" prefix from the root tag for later lookups.
251         ns = root.getroot().tag
252         ns = ns[0:ns.rindex("}")+1]
255         for elm in root.getiterator("%sdata" % ns):
256             if elm.attrib["type"] == "patterns":
257                 patterns = elm.find("%slocation" % ns).attrib['href']
261         for elm in root.getiterator("%sdata" % ns):
262             if elm.attrib["type"] == "group_gz":
263                 comps = elm.find("%slocation" % ns).attrib['href']
# Plain "group" presumably only taken when group_gz was absent (missing
# guard lines) -- TODO confirm.
266         for elm in root.getiterator("%sdata" % ns):
267             if elm.attrib["type"] == "group":
268                 comps = elm.find("%slocation" % ns).attrib['href']
# Prefer the sqlite primary db over primary.xml when available.
272         for elm in root.getiterator("%sdata" % ns):
273             if elm.attrib["type"] == "primary_db":
274                 primary_type=".sqlite"
278         for elm in root.getiterator("%sdata" % ns):
279             if elm.attrib["type"] == "primary":
286         primary = elm.find("%slocation" % ns).attrib['href']
287         primary = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, primary)
290             patterns = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, patterns)
293             comps = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, comps)
# Repo key is optional: failure only warns (try/except in missing lines).
297             repokey = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, "repodata/repomd.xml.key")
300             msger.warning("can't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))
302         my_repo_metadata.append({"name":reponame, "baseurl":baseurl, "repomd":repomd, "primary":primary, "cachedir":cachedir, "proxies":proxies, "patterns":patterns, "comps":comps, "repokey":repokey})
304     return my_repo_metadata
# Locate `pkg` in the given repo metadata (primary.xml or primary sqlite
# db), pick a version (comparison logic is in missing lines), download the
# rpm into the repo's package cache and presumably return its local path.
# NOTE(review): missing lines include the version-compare bookkeeping,
# the `target_repo` selection, connection cleanup and the returns.
306 def get_package(pkg, repometadata, arch = None):
309     for repo in repometadata:
# XML primary metadata path.
310         if repo["primary"].endswith(".xml"):
311             root = xmlparse(repo["primary"])
312             ns = root.getroot().tag
313             ns = ns[0:ns.rindex("}")+1]
314             for elm in root.getiterator("%spackage" % ns):
315                 if elm.find("%sname" % ns).text == pkg:
# Skip source rpms when resolving a binary package.
316                     if elm.find("%sarch" % ns).text != "src":
317                         version = elm.find("%sversion" % ns)
318                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
321                             location = elm.find("%slocation" % ns)
322                             pkgpath = "%s" % location.attrib['href']
# Sqlite primary metadata path.
# NOTE(review): the SQL is built by string interpolation of `pkg`; safe
# only because pkg names come from trusted metadata/kickstarts -- should
# use a parameterized query ("... where name = ?", (pkg,)).
325         if repo["primary"].endswith(".sqlite"):
326             con = sqlite.connect(repo["primary"])
328             for row in con.execute("select version, release,location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
329                 tmpver = "%s-%s" % (row[0], row[1])
331                     pkgpath = "%s" % row[2]
# Fallback query without the arch filter (guard lines missing).
335                 for row in con.execute("select version, release,location_href from packages where name = \"%s\"" % pkg):
336                     tmpver = "%s-%s" % (row[0], row[1])
338                         pkgpath = "%s" % row[2]
# Download the chosen rpm into <cachedir>/<reponame>/packages/.
343     makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
344     url = str(target_repo["baseurl"] + "/" + pkgpath)
345     filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
346     pkg = myurlgrab(url, filename, target_repo["proxies"])
# Map a binary rpm file/package name to the name of its source rpm, by
# looking up the binary package in repo metadata and reading its
# sourcerpm field, then stripping version/release with get_src_name().
# NOTE(review): the helpers' return statements and the not-found paths
# are in missing lines.
351 def get_source_name(pkg, repometadata):
# Strip "-ver-rel.arch.rpm" from a binary rpm filename to get its name.
353     def get_bin_name(pkg):
354         m = re.match("(.*)-(.*)-(.*)\.(.*)\.rpm", pkg)
# Strip "-ver-rel.src.rpm" from a source rpm filename to get its name.
359     def get_src_name(srpm):
360         m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
368     pkg_name = get_bin_name(pkg)
372     for repo in repometadata:
# XML primary metadata path.
373         if repo["primary"].endswith(".xml"):
374             root = xmlparse(repo["primary"])
375             ns = root.getroot().tag
376             ns = ns[0:ns.rindex("}")+1]
377             for elm in root.getiterator("%spackage" % ns):
378                 if elm.find("%sname" % ns).text == pkg_name:
379                     if elm.find("%sarch" % ns).text != "src":
380                         version = elm.find("%sversion" % ns)
381                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
# The <format> child carries its own namespace; re-derive it before
# looking up rpm:sourcerpm.
384                         fmt = elm.find("%sformat" % ns)
386                             fns = fmt.getchildren()[0].tag
387                             fns = fns[0:fns.rindex("}")+1]
388                             pkgpath = fmt.find("%ssourcerpm" % fns).text
# Sqlite primary metadata path (same interpolation caveat as get_package).
392         if repo["primary"].endswith(".sqlite"):
393             con = sqlite.connect(repo["primary"])
394             for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
395                 tmpver = "%s-%s" % (row[0], row[1])
397                 pkgpath = "%s" % row[2]
402     return get_src_name(pkgpath)
# Determine the distro release number by downloading the <distro>-release
# package, unpacking it with rpm2cpio | cpio into a temp dir, and reading
# /etc/<distro>-release.  Returns the third whitespace-separated token --
# presumably the version in a "<Distro> release X.Y ..." line; TODO confirm.
# NOTE(review): tmpdir creation, the p2.communicate() call, and the
# f.read()/close lines are missing from this dump.
406 def get_release_no(repometadata, distro="meego"):
409     cpio = find_binary_path("cpio")
410     rpm2cpio = find_binary_path("rpm2cpio")
411     release_pkg = get_package("%s-release" % distro, repometadata)
# Pipe rpm payload through cpio to extract without installing.
416     p1 = subprocess.Popen([rpm2cpio, release_pkg], stdout = subprocess.PIPE)
417     p2 = subprocess.Popen([cpio, "-idv"], stdin = p1.stdout, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
419     f = open("%s/etc/%s-release" % (tmpdir, distro), "r")
423     shutil.rmtree(tmpdir, ignore_errors = True)
424     return content.split(" ")[2]
# Collect the kickstart files advertised by repos: read each repo's
# repomd.xml for an "image-config" entry, fetch and parse that
# image-config.xml, then download every referenced kickstart into the
# cache.  Returns a list of {"filename", "description"} dicts.
# NOTE(review): result-list init, try/excepts, `break`/`continue`, the
# `suffix` computation for compressed image-configs, and the http(s)
# path-handling else-branch are in missing lines.
428 def get_kickstarts_from_repos(repometadata):
430     for repo in repometadata:
432             root = xmlparse(repo["repomd"])
434             raise CreatorError("repomd.xml syntax error.")
436         ns = root.getroot().tag
437         ns = ns[0:ns.rindex("}")+1]
439         for elm in root.getiterator("%sdata" % ns):
440             if elm.attrib["type"] == "image-config":
# Repo advertises no image-config: skip it (control flow in missing lines).
443         if elm.attrib["type"] != "image-config":
446         location = elm.find("%slocation" % ns)
447         image_config = str(repo["baseurl"] + "/" + location.attrib["href"])
448         filename = str("%s/%s/image-config.xml%s" % (repo["cachedir"], repo["name"], suffix))
450         image_config = get_uncompressed_data_from_url(image_config,filename,repo["proxies"])
453             root = xmlparse(image_config)
455             raise CreatorError("image-config.xml syntax error.")
457         for elm in root.getiterator("config"):
458             path = elm.find("path").text
# Normalize a known misspelling in some repos' metadata.
459             path = path.replace("images-config", "image-config")
460             description = elm.find("description").text
461             makedirs(os.path.dirname("%s/%s/%s" % (repo["cachedir"], repo["name"], path)))
# Relative paths are fetched from the repo; absolute http(s) URLs are
# presumably handled in the missing else-branch -- TODO confirm.
463             if "http" not in path:
464                 url = str(repo["baseurl"] + "/" + path)
465                 filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], path))
466                 path = myurlgrab(url, filename, repo["proxies"])
467             kickstarts.append({"filename":path,"description":description})
# Interactively ask the user to pick one of the listed kickstart files;
# returns the chosen file's path.  Uses raw_input, so this module is
# Python 2 code.
# NOTE(review): the enumeration loop header, the int() conversion of
# `choice` (it is compared numerically at line 483), and the quit/retry
# control flow are in missing lines.
470 def select_ks(ksfiles):
471     msger.info("Available kickstart files:")
475         msger.raw("\t%d. %s (%s)" % (i, ks["description"], os.path.basename(ks["filename"])))
478         choice = raw_input("Please input your choice and press ENTER. [1..%d] ? " % i)
479         if choice.lower() == "q":
483         if choice >= 1 and choice <= i:
# 1-based menu index into the 0-based list.
486     return ksfiles[choice-1]["filename"]
# Resolve a pattern `group` (matched by <name> or <summary>) in a
# patterns.xml file to its list of required package names.
# NOTE(review): the pkglist init, the per-element `ns` derivation (line
# 497/498 context), the matched-group bookkeeping, and the return are in
# missing lines.
488 def get_pkglist_in_patterns(group, patterns):
492         root = xmlparse(patterns)
494         raise SyntaxError("%s syntax error." % patterns)
496     for elm in list(root.getroot()):
# Extract the "{namespace}" prefix of this pattern element's tag.
498         ns = ns[0:ns.rindex("}")+1]
499         name = elm.find("%sname" % ns)
500         summary = elm.find("%ssummary" % ns)
# Match the requested group by exact name or summary text.
501         if name.text == group or summary.text == group:
509     for requires in list(elm):
510         if requires.tag.endswith("requires"):
# Collect each required package name once.
517     for pkg in list(requires):
518         pkgname = pkg.attrib["name"]
519         if pkgname not in pkglist:
520             pkglist.append(pkgname)
# Resolve a comps.xml `group` (matched by <id> or <name>) to its list of
# package names from the group's <packagelist>.
# NOTE(review): the pkglist init, the `id = elm.find("id")` line (note:
# the local shadows the `id` builtin), the group-found bookkeeping and
# the return are in missing lines.
524 def get_pkglist_in_comps(group, comps):
528         root = xmlparse(comps)
530         raise SyntaxError("%s syntax error." % comps)
532     for elm in root.getiterator("group"):
534         name = elm.find("name")
535         if id.text == group or name.text == group:
536             packagelist = elm.find("packagelist")
# Collect each <packagereq> entry once.
543     for require in elm.getiterator("packagereq"):
544         if require.tag.endswith("packagereq"):
545             pkgname = require.text
546             if pkgname not in pkglist:
547                 pkglist.append(pkgname)
def is_statically_linked(binary):
    """Return True if file(1) reports `binary` as a statically linked
    executable (substring match on the ", statically linked, " marker)."""
    return ", statically linked, " in runner.outs(['file', binary])
# Prepare `rootdir` so ARM binaries can run on an x86 host: mount
# binfmt_misc, copy a statically linked qemu-arm into the rootfs, disable
# SELinux, and (re)register the ARM ELF magic with the kernel.
# NOTE(review): the fd.close() calls, the early-return when an acceptable
# registration already exists, and the return value are in missing lines.
554 def setup_qemu_emulator(rootdir, arch):
# mount binfmt_misc if it doesn't exist
556     if not os.path.exists("/proc/sys/fs/binfmt_misc"):
557         modprobecmd = find_binary_path("modprobe")
558         runner.show([modprobecmd, "binfmt_misc"])
559     if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
560         mountcmd = find_binary_path("mount")
561         runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])
# qemu_emulator is a special case, we can't use find_binary_path
# qemu emulator should be a statically-linked executable file
# (the kernel will exec it inside the chroot, where host libs are absent)
565     qemu_emulator = "/usr/bin/qemu-arm"
566     if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
567         qemu_emulator = "/usr/bin/qemu-arm-static"
568     if not os.path.exists(qemu_emulator):
569         raise CreatorError("Please install a statically-linked qemu-arm")
# Copy the emulator into the rootfs at the same absolute path the binfmt
# registration will reference.
570     if not os.path.exists(rootdir + "/usr/bin"):
571         makedirs(rootdir + "/usr/bin")
572     shutil.copy(qemu_emulator, rootdir + qemu_emulator)
# disable selinux, selinux will block qemu emulator to run
575     if os.path.exists("/usr/sbin/setenforce"):
576         msger.info('Try to disable selinux')
577         runner.show(["/usr/sbin/setenforce", "0"])
# A static emulator already registered is fine -- the missing lines here
# presumably keep/return early; TODO confirm.
579     node = "/proc/sys/fs/binfmt_misc/arm"
580     if is_statically_linked(qemu_emulator) and os.path.exists(node):
# unregister it if it has been registered and is a dynamically-linked executable
584     if not is_statically_linked(qemu_emulator) and os.path.exists(node):
# Writing "-1" to a binfmt_misc entry removes the registration.
585         qemu_unregister_string = "-1\n"
586         fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
587         fd.write(qemu_unregister_string)
# register qemu emulator for interpreting other arch executable file
591     if not os.path.exists(node):
# binfmt_misc rule: match the ARM ELF header magic/mask and hand the
# binary to qemu_emulator.
592         qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
593         fd = open("/proc/sys/fs/binfmt_misc/register", "w")
594         fd.write(qemu_arm_string)
599 def create_release(config, destdir, name, outimages, release):
600     """ TODO: This functionality should really be in creator.py inside the
601     ImageCreator class. """
# Flatten VM-image subdirs into destdir, write the release .ks/.packages
# files, rename iso/usbimg outputs to .img, and write an md5sum MANIFEST.
# NOTE(review): `file = outimages[i]`, the fd read/close pairs, the
# md5sum rc check and the trailing `outimages[:] = updated_list` /
# return are in missing lines.  Local `file` shadows the builtin.
# For virtual machine images, we have a subdir for it, this is unnecessary
606     for i in range(len(outimages)):
608         if not os.path.isdir(file) and os.path.dirname(file) != destdir:
609             thatsubdir = os.path.dirname(file)
610             newfile = os.path.join(destdir, os.path.basename(file))
611             shutil.move(file, newfile)
612             outimages[i] = newfile
614             shutil.rmtree(thatsubdir, ignore_errors = True)
# """ Create release directory and files """
617     runner.show("cp %s %s/%s.ks" % (config, destdir, name))
# When building a release we want to make sure the .ks
# file generates the same build even when --release= is not used.
620     fd = open(config, "r")
623     kscont = kscont.replace("@BUILD_ID@",release)
624     fd = open("%s/%s.ks" % (destdir,name), "w")
627     outimages.append("%s/%s.ks" % (destdir,name))
# Using system + mv, because of * in filename.
630     runner.show("mv %s/*-pkgs.txt %s/%s.packages" % (destdir, destdir, name))
631     outimages.append("%s/%s.packages" % (destdir,name))
# Normalize bootable image extensions to .img for release publishing.
633     for f in os.listdir(destdir):
634         if f.endswith(".iso"):
635             ff = f.replace(".iso", ".img")
636             os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
637             outimages.append("%s/%s" %(destdir, ff))
638         elif f.endswith(".usbimg"):
639             ff = f.replace(".usbimg", ".img")
640             os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
641             outimages.append("%s/%s" %(destdir, ff))
# MANIFEST: "<md5sum> <filename>" per released file (best effort).
643     if os.path.exists("/usr/bin/md5sum"):
644         fd = open(destdir + "/MANIFEST", "w")
645         for f in os.listdir(destdir):
649             rc, md5sum = runner.runtool(["/usr/bin/md5sum", "-b", "%s/%s" %(destdir, f )])
651             msger.warning("Can't generate md5sum for image %s/%s" %(destdir, f ))
# md5sum -b output is "<hash> *<file>"; keep only the hash.
653             md5sum = md5sum.lstrip().split()[0]
654             fd.write(md5sum+" "+f+"\n")
656         outimages.append("%s/MANIFEST" % destdir)
# """ Update the file list. """
# Drop entries that no longer exist on disk after the moves/renames above.
661     for file in outimages:
662         if os.path.exists("%s" % file):
663             updated_list.append(file)
# Copy the source rpms for the given binary `pkgs` into
# <instroot>/usr/src/SRPMS, preferring already-cached srpms and falling
# back to downloading from "-source" repos.
# NOTE(review): accumulator initializations (src_repometadata, lpkgs_path,
# lpkgs_dict, srcpkgset, src_pkgs, cached_count), the destdir creation,
# the cache-hit/miss branch headers, error handling and the return are in
# missing lines.  Function name is CamelCase, unlike the rest of the file.
667 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
# Source repos are identified purely by the "-source" name suffix.
669     def get_source_repometadata(repometadata):
671         for repo in repometadata:
672             if repo["name"].endswith("-source"):
673                 src_repometadata.append(repo)
675         return src_repometadata
# Strip "-ver-rel.src.rpm" from a source rpm filename to get its name.
678     def get_src_name(srpm):
679         m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
684     src_repometadata = get_source_repometadata(repometadata)
686     if not src_repometadata:
687         msger.warning("No source repo found")
# Index already-downloaded srpms in the cache by package name.
693     for repo in src_repometadata:
694         cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
695         lpkgs_path += glob.glob(cachepath)
697     for lpkg in lpkgs_path:
698         lpkg_name = get_src_name(os.path.basename(lpkg))
699         lpkgs_dict[lpkg_name] = lpkg
700     localpkgs = lpkgs_dict.keys()
703     destdir = instroot+'/usr/src/SRPMS'
704     if not os.path.exists(destdir):
# Map each installed binary package to its source package name (dedup set).
709         srcpkg_name = get_source_name(_pkg, repometadata)
712         srcpkgset.add(srcpkg_name)
# Cache hit: copy from cache; miss: download via get_package(..., 'src').
714     for pkg in list(srcpkgset):
717             shutil.copy(lpkgs_dict[pkg], destdir)
718             src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
720             src_pkg = get_package(pkg, src_repometadata, 'src')
722             shutil.copy(src_pkg, destdir)
723             src_pkgs.append(src_pkg)
724     msger.info("%d source packages gotten from cache" %cached_count)