2 # misc.py : miscellaneous utilities
4 # Copyright 2010, 2011 Intel Inc.
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; version 2 of the License.
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU Library General Public License for more details.
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
31 import sqlite3 as sqlite
36 from xml.etree import cElementTree
39 xmlparse = cElementTree.parse
42 from fs_related import *
# Force a predictable "C" locale so output of external tools parsed below
# (file, du, ...) is not localized, then wrap stdout so characters that the
# terminal encoding cannot represent are replaced instead of raising.
# NOTE(review): a source line between the two setlocale calls is elided here.
locale.setlocale(locale.LC_ALL,'')
os.environ['LC_ALL'] = 'C'
locale.setlocale(locale.LC_ALL,'C')
sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
sys.stdout.errors = 'replace'
def get_extension_name(path):
    """Return the filename extension of `path` (the \\w+ run after the
    final dot), used below to guess image types.
    NOTE(review): the return statement(s) of this function are elided in
    this chunk -- presumably `match.group(0)` when the search matched.
    """
    match = re.search("(?<=\.)\w+$", path)
def get_image_type(path):
    """Guess the image type of the file at `path`.

    Tries, in order: filename extension, a VirtualBox VDI header sniff,
    then pattern-matching the description printed by file(1).
    NOTE(review): several source lines are elided in this chunk
    (directory handling, the open() producing `fd`, and some returns).
    """
    if os.path.isdir(path):
    # Map filename extensions to image-type names used by the callers.
    maptab = {"raw":"raw", "vmdk":"vmdk", "vdi":"vdi", "iso":"livecd", "usbimg":"liveusb"}
    extension = get_extension_name(path)
    if extension in ("raw", "vmdk", "vdi", "iso", "usbimg"):
        return maptab[extension]
    # Extension was inconclusive: sniff the first 1K of file content.
    file_header = fd.read(1024)
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
    # Last resort: run file(1) and match its one-line description.
    dev_null = os.open("/dev/null", os.O_WRONLY)
    filecmd = find_binary_path("file")
    args = [ filecmd, path ]
    file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
    output = file.communicate()[0]
    # Most specific patterns must be tested first (usbimg before raw).
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    if isoptn.match(output):
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
def get_file_size(file):
    """Return size in MB unit"""
    # Runs du(1) twice: once with "-b" (presumably apparent size -- TODO
    # confirm) and once without, both scaled with "-B 1M".  The final
    # comparison/return of size1 vs size2 is on elided source lines.
    du = find_binary_path("du")
    dev_null = os.open("/dev/null", os.O_WRONLY)
    duProc = subprocess.Popen([du, "-s", "-b", "-B", "1M", file],
                              stdout=subprocess.PIPE, stderr=dev_null)
    duOutput = duProc.communicate()[0]
    if duProc.returncode:
        raise CreatorError("Failed to run %s" % du)
    # First whitespace-separated token of du output is the size in MB.
    size1 = int(duOutput.split()[0])
    duProc = subprocess.Popen([du, "-s", "-B", "1M", file],
                              stdout=subprocess.PIPE, stderr=dev_null)
    duOutput = duProc.communicate()[0]
    if duProc.returncode:
        raise CreatorError("Failed to run %s" % du)
    size2 = int(duOutput.split()[0])
def get_filesystem_avail(fs):
    """Return the free space, in bytes, available to an unprivileged
    user on the filesystem that contains `fs`.
    """
    stat = os.statvfs(fs)
    # Free blocks available to non-root (f_bavail) times block size.
    return stat.f_bsize * stat.f_bavail
def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    """Convert a vmdk/vdi disk image at `srcimg` into `dstimg`, using
    qemu-img for vmdk sources and VBoxManage for vdi sources.
    NOTE(review): the guard lines (dstfmt validation, the leading
    `if srcfmt == "vmdk":`, and the rc test) are elided in this chunk.
    """
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
        # vmdk sources are converted by qemu-img.
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        # vdi sources go through VBoxManage's converttoraw subcommand.
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
        # NOTE(review): "soure" typo below is in the runtime message and
        # is preserved here; fixing it would change behavior.
        raise CreatorError("Invalid soure image format: %s" % srcfmt)
    rc = subprocess.call(argv)
        msger.debug("convert successful")
        raise CreatorError("Unable to convert disk to %s" % dstfmt)
def myxcopytree(src, dst):
    """Copy the contents of `src` into `dst` using `cp -af` (preserving
    attributes), then remove pseudo-files that must not survive in the
    copied tree.
    NOTE(review): the loop header that iterates `dirnames` is elided in
    this chunk.
    """
    dev_null = os.open("/dev/null", os.O_WRONLY)
    dirnames = os.listdir(src)
    copycmd = find_binary_path("cp")
    args = [ copycmd, "-af", src + "/" + dir, dst ]
    subprocess.call(args, stdout=dev_null, stderr=dev_null)
    # These entries are per-process/volatile and meaningless in a copy.
    ignores = ["dev/fd", "dev/stdin", "dev/stdout", "dev/stderr", "etc/mtab"]
    for exclude in ignores:
        if os.path.exists(dst + "/" + exclude):
            os.unlink(dst + "/" + exclude)
def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress file system from squshfs image"""
    # Extracts the squashfs image into `outdir` via unsquashfs(1).
    unsquashfs = find_binary_path("unsquashfs")
    args = [ unsquashfs, "-d", outdir, squashfsimg ]
    rc = subprocess.call(args)
    # NOTE(review): the `if rc` guard for this raise is elided here.
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)
def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """Create and return a fresh temporary directory under `dir`.
    NOTE(review): one source line (presumably makedirs(dir)) is elided
    between the signature and the return.
    """
    return tempfile.mkdtemp(dir = dir, prefix = prefix)
def ismeego(rootdir):
    """Heuristically decide whether `rootdir` holds a MeeGo/Moblin root
    filesystem: a release file, sysvinit files, and an installed kernel.
    NOTE(review): the return statements are elided in this chunk.
    """
    if (os.path.exists(rootdir + "/etc/moblin-release") \
       or os.path.exists(rootdir + "/etc/meego-release")) \
       and os.path.exists(rootdir + "/etc/inittab") \
       and os.path.exists(rootdir + "/etc/rc.sysinit") \
       and glob.glob(rootdir + "/boot/vmlinuz-*"):
def is_meego_bootstrap(rootdir):
    """Heuristically decide whether `rootdir` is a MeeGo bootstrap
    environment: a release file plus python and mic-image-creator
    binaries.  NOTE(review): the return statements are elided.
    """
    if (os.path.exists(rootdir + "/etc/moblin-release") \
       or os.path.exists(rootdir + "/etc/meego-release")) \
       and os.path.exists(rootdir + "/usr/bin/python") \
       and os.path.exists(rootdir + "/usr/bin/mic-image-creator"):
# Parsed no-proxy entries; each item is a dict {"match": 0|1|2, "needle": ...}
# built by set_noproxy_list() below.  Companion globals (_my_noproxy,
# _my_proxies) are declared on source lines elided from this chunk.
_my_noproxy_list = []
def set_proxy_environ():
    """Export the proxies recorded in _my_proxies and _my_noproxy into
    os.environ so child processes inherit them.
    NOTE(review): the guard lines around these statements are elided.
    """
    global _my_noproxy, _my_proxies
    for key in _my_proxies.keys():
        # _my_proxies keys are schemes ("http", ...) -> "<scheme>_proxy".
        os.environ[key + "_proxy"] = _my_proxies[key]
    os.environ["no_proxy"] = _my_noproxy
def unset_proxy_environ():
    """Remove every proxy-related variable from os.environ.

    Both the lower-case (http_proxy, ...) and upper-case (HTTP_PROXY,
    ...) spellings are cleared; per the note in _set_proxies(), urllib2
    cannot handle these environment variables correctly, so they must
    not leak into this process or its children.
    """
    for name in ("http_proxy", "https_proxy", "ftp_proxy",
                 "all_proxy", "no_proxy"):
        for key in (name, name.upper()):
            # `key in os.environ` replaces the Python-2-only
            # dict.has_key() used by the original; behavior is the same
            # and the 10 copy-pasted if-blocks collapse to one loop.
            if key in os.environ:
                del os.environ[key]
def _set_proxies(proxy = None, no_proxy = None):
    """Return a dictionary of scheme -> proxy server URL mappings."""
    # Populates the module globals from explicit arguments or, when both
    # are unset, from the process environment (which is then scrubbed).
    # NOTE(review): initialization lines and the guards around the two
    # appends are elided in this chunk.
    global _my_noproxy, _my_proxies
    proxies.append(("http_proxy", proxy))
    proxies.append(("no_proxy", no_proxy))
    """Get proxy settings from environment variables if not provided"""
    if not proxy and not no_proxy:
        proxies = os.environ.items()
        """ Remove proxy env variables, urllib2 can't handle them correctly """
        unset_proxy_environ()
    for name, value in proxies:
        # Accept any "<scheme>_proxy" entry except the "no_proxy" ones;
        # store it keyed by scheme (name with the "_proxy" suffix cut).
        if value and name[-6:] == '_proxy':
            if name[0:2] != "no":
                _my_proxies[name[:-6]] = value
# NOTE(review): the lines below are interior fragments of three helper
# functions whose `def` lines are elided from this chunk: an
# ip-string -> 32-bit-int packer, an int -> dotted-quad formatter, and an
# isip() digits-and-dots test.  Do not read them as one code path.
for dec in ip.split("."):
    ipint |= int(dec) << shift
ipaddr = ".%d%s" % (dec, ipaddr)
if host.replace(".", "").isdigit():
def set_noproxy_list():
    """Parse the comma-separated _my_noproxy string into
    _my_noproxy_list entries: match 0 = exact hostname, 1 = domain
    suffix (leading-dot items), 2 = IP/netmask.
    NOTE(review): several guard lines and the branch choosing between
    CIDR-prefix and dotted-quad masks are elided from this chunk.
    """
    global _my_noproxy, _my_noproxy_list
    _my_noproxy_list = []
    for item in _my_noproxy.split(","):
        if item[0] != '.' and item.find("/") == -1:
            """ Need to match it """
            _my_noproxy_list.append({"match":0,"needle":item})
            """ Need to match at tail """
            _my_noproxy_list.append({"match":1,"needle":item})
        elif item.find("/") > 3:
            """ IP/MASK, need to match at head """
            needle = item[0:item.find("/")].strip()
            ip = ip_to_int(needle)
            mask = item[item.find("/")+1:].strip()
            # CIDR prefix length -> 32-bit netmask.
            netmask = ~((1<<(32-netmask)) - 1)
            # Dotted-quad mask -> 32-bit netmask, one octet per shift.
            for dec in mask.split("."):
                netmask |= int(dec) << shift
            _my_noproxy_list.append({"match":2,"needle":ip,"netmask":netmask})
# NOTE(review): interior of an isnoproxy(url)-style predicate whose `def`
# line is elided; it returns True (on elided lines) when `host` matches
# an entry in _my_noproxy_list.
    (scheme, host, path, parm, query, frag) = urlparse.urlparse(url)
    # Strip credentials and port before matching the bare host.
    user_pass, host = host.split('@', 1)
    host, port = host.split(':', 1)
    hostisip = isip(host)
    for item in _my_noproxy_list:
        # Hostname rules (match 0/1) never apply to a numeric IP.
        if hostisip and item["match"] <= 1:
        # match 2: IP inside the recorded network.
        if item["match"] == 2 and hostisip:
            if (ip_to_int(host) & item["netmask"]) == item["needle"]:
        # match 0: exact hostname equality.
        if item["match"] == 0:
            if host == item["needle"]:
        # match 1: domain-suffix match.
        if item["match"] == 1:
            if host.rfind(item["needle"]) > 0:
def set_proxies(proxy = None, no_proxy = None):
    """Public entry point for configuring module proxy state.
    NOTE(review): a following line (presumably rebuilding the no-proxy
    list) appears to be elided after the call below -- confirm.
    """
    _set_proxies(proxy, no_proxy)
# NOTE(review): interior of a get_proxy(url)-style helper whose `def`
# line is elided; returns the proxy for url's scheme, falling back to
# the "http" proxy, and none for file:// or no-proxy URLs.
    if url[0:4] == "file" or isnoproxy(url):
    # Scheme is everything before the first ":" of the URL.
    type = url[0:url.index(":")]
    if _my_proxies.has_key(type):
        proxy = _my_proxies[type]
    elif _my_proxies.has_key("http"):
        proxy = _my_proxies["http"]
def get_temp_reponame(baseurl):
    """Derive a stable, unique repo/cache name from `baseurl` -- the md5
    hex digest of the URL.
    NOTE(review): the return statement is elided in this chunk.
    """
    md5obj = hashlib.md5(baseurl)
    tmpreponame = "%s" % md5obj.hexdigest()
def get_repostrs_from_ks(ks):
    """Serialize every `repo` entry of a parsed kickstart into a
    comma-separated "key:value" string; returns the list of strings.
    NOTE(review): the accumulator initializations (kickstart_repos,
    repostr) are on elided source lines.
    """
    for repodata in ks.handler.repo.repoList:
        if hasattr(repodata, "name") and repodata.name:
            repostr += ",name:" + repodata.name
        if hasattr(repodata, "baseurl") and repodata.baseurl:
            repostr += ",baseurl:" + repodata.baseurl
        if hasattr(repodata, "mirrorlist") and repodata.mirrorlist:
            repostr += ",mirrorlist:" + repodata.mirrorlist
        # Package include/exclude lists are joined with ";" since ","
        # is the field separator of this serialization.
        if hasattr(repodata, "includepkgs") and repodata.includepkgs:
            repostr += ",includepkgs:" + ";".join(repodata.includepkgs)
        if hasattr(repodata, "excludepkgs") and repodata.excludepkgs:
            repostr += ",excludepkgs:" + ";".join(repodata.excludepkgs)
        if hasattr(repodata, "cost") and repodata.cost:
            repostr += ",cost:%d" % repodata.cost
        if hasattr(repodata, "save") and repodata.save:
        if hasattr(repodata, "proxy") and repodata.proxy:
            repostr += ",proxy:" + repodata.proxy
        # NOTE(review): the hasattr names below ("proxyuser"/"proxypasswd")
        # do not match the attributes actually read
        # (proxy_username/proxy_password) -- verify against pykickstart.
        if hasattr(repodata, "proxyuser") and repodata.proxy_username:
            repostr += ",proxyuser:" + repodata.proxy_username
        if hasattr(repodata, "proxypasswd") and repodata.proxy_password:
            repostr += ",proxypasswd:" + repodata.proxy_password
        if repostr.find("name:") == -1:
            # NOTE(review): plain assignment discards the fields already
            # accumulated in repostr above -- likely intended as "+=";
            # confirm against project history before changing.
            repostr = ",name:%s" % get_temp_reponame(repodata.baseurl)
        # debuginfo/source are boolean flags: presence-only, empty value.
        if hasattr(repodata, "debuginfo") and repodata.debuginfo:
            repostr += ",debuginfo:"
        if hasattr(repodata, "source") and repodata.source:
            repostr += ",source:"
        if hasattr(repodata, "gpgkey") and repodata.gpgkey:
            repostr += ",gpgkey:" + repodata.gpgkey
        # Strip the leading "," before recording the entry.
        kickstart_repos.append(repostr[1:])
    return kickstart_repos
def get_uncompressed_data_from_url(url, filename, proxies):
    """Download `url` to `filename` and, when the download is .gz or
    .bz2 compressed, decompress it in place; returns the local path of
    the (uncompressed) file.
    NOTE(review): the `suffix` bookkeeping lines and the final return
    are elided in this chunk.
    """
    filename = myurlgrab(url, filename, proxies)
    if filename.endswith(".gz"):
        gunzip = find_binary_path('gunzip')
        subprocess.call([gunzip, "-f", filename])
    elif filename.endswith(".bz2"):
        bunzip2 = find_binary_path('bunzip2')
        subprocess.call([bunzip2, "-f", filename])
        # Drop the compression suffix to get the uncompressed filename.
        filename = filename.replace(suffix, "")
def get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename):
    """Fetch <baseurl>/<filename> into the per-repo cache directory and
    return the local path of the (uncompressed) file.
    """
    url = str("%s/%s" % (baseurl, filename))
    cached = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    return get_uncompressed_data_from_url(url, cached, proxies)
def get_metadata_from_repos(repostrs, cachedir):
    """For every serialized repo string (see get_repostrs_from_ks),
    download repodata/repomd.xml plus the primary / patterns / comps /
    signing-key files it references; returns a list of per-repo dicts.
    NOTE(review): many lines are elided in this chunk -- the item loop
    header, try/except around downloads and xmlparse, `break`s, and the
    defaults for patterns/comps/primary_type/repokey.
    """
    my_repo_metadata = []
    for repostr in repostrs:
        items = repostr.split(",")
            subitems = item.split(":")
            if subitems[0] == "name":
                reponame = subitems[1]
            if subitems[0] == "baseurl":
            if subitems[0] == "proxy":
            # Scheme tokens appear when a URL value was split on ":".
            if subitems[0] in ("http", "https", "ftp", "ftps", "file"):
            proxy = get_proxy(baseurl)
            # urlgrabber-style proxies dict: scheme -> proxy URL.
            proxies = {str(proxy.split(":")[0]):str(proxy)}
        makedirs(cachedir + "/" + reponame)
        url = str(baseurl + "/repodata/repomd.xml")
        filename = str("%s/%s/repomd.xml" % (cachedir, reponame))
        repomd = myurlgrab(url, filename, proxies)
        root = xmlparse(repomd)
            raise CreatorError("repomd.xml syntax error.")
        # Extract the XML namespace ("{...}") prefix from the root tag.
        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                patterns = elm.find("%slocation" % ns).attrib['href']
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "group_gz":
                comps = elm.find("%slocation" % ns).attrib['href']
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "group":
                comps = elm.find("%slocation" % ns).attrib['href']
        # Prefer the sqlite primary db over the XML primary when present.
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "primary_db":
                primary_type=".sqlite"
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "primary":
        primary = elm.find("%slocation" % ns).attrib['href']
        primary = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, primary)
        patterns = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, patterns)
        comps = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, comps)
        # The repo signing key is optional; a failed fetch only warns.
        repokey = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, "repodata/repomd.xml.key")
        msger.warning("can't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))
        my_repo_metadata.append({"name":reponame, "baseurl":baseurl, "repomd":repomd, "primary":primary, "cachedir":cachedir, "proxies":proxies, "patterns":patterns, "comps":comps, "repokey":repokey})
    return my_repo_metadata
def get_arch(repometadata):
    """Collect the distinct binary architectures (excluding "noarch" and
    "src") found in the repos' primary metadata, XML or sqlite.
    NOTE(review): the archlist initialization, connection cleanup, and
    return are on elided source lines.
    """
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            # Extract the XML namespace prefix from the root tag.
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])
def get_package(pkg, repometadata, arch = None):
    """Locate the newest build of `pkg` across the given repos (XML or
    sqlite primary metadata) and download its rpm into the repo cache;
    the downloaded local path is assigned to `pkg` before return.
    NOTE(review): version-comparison lines, target_repo bookkeeping,
    breaks, and the return are elided in this chunk.
    """
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        location = elm.find("%slocation" % ns)
                        pkgpath = "%s" % location.attrib['href']
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            # NOTE(review): `pkg` is %-interpolated into SQL.  Input is a
            # trusted kickstart package name here, but this is not
            # injection-safe; parameterized queries would be.
            for row in con.execute("select version, release,location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
                tmpver = "%s-%s" % (row[0], row[1])
                pkgpath = "%s" % row[2]
                # Variant without the arch filter (for arch == 'src').
                for row in con.execute("select version, release,location_href from packages where name = \"%s\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    pkgpath = "%s" % row[2]
    makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
    url = str(target_repo["baseurl"] + "/" + pkgpath)
    filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
    pkg = myurlgrab(url, filename, target_repo["proxies"])
def get_source_name(pkg, repometadata):
    """Map a binary rpm filename to the name of its source package by
    looking up the package's sourcerpm entry in the repo metadata.
    NOTE(review): the helpers' return statements, guards, and several
    bookkeeping lines are elided from this chunk.
    """
    def get_bin_name(pkg):
        # "<name>-<ver>-<rel>.<arch>.rpm" -> groups; name is group 1.
        m = re.match("(.*)-(.*)-(.*)\.(.*)\.rpm", pkg)
    def get_src_name(srpm):
        # "<name>-<ver>-<rel>.src.rpm" -> groups; name is group 1.
        m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
    pkg_name = get_bin_name(pkg)
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        # The sourcerpm filename lives in the package's
                        # <format> child, under its own namespace.
                        fmt = elm.find("%sformat" % ns)
                        fns = fmt.getchildren()[0].tag
                        fns = fns[0:fns.rindex("}")+1]
                        pkgpath = fmt.find("%ssourcerpm" % fns).text
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                pkgpath = "%s" % row[2]
    return get_src_name(pkgpath)
def get_release_no(repometadata, distro="meego"):
    """Extract the release number from the <distro>-release package:
    download it, unpack with rpm2cpio|cpio into a temp dir, and return
    the third whitespace-separated token of /etc/<distro>-release.
    NOTE(review): tmpdir creation, chdir, pipe completion and file
    read/close lines are elided from this chunk.
    """
    cpio = find_binary_path("cpio")
    rpm2cpio = find_binary_path("rpm2cpio")
    release_pkg = get_package("%s-release" % distro, repometadata)
    # rpm2cpio <pkg> | cpio -idv  -- unpack the rpm payload in place.
    p1 = subprocess.Popen([rpm2cpio, release_pkg], stdout = subprocess.PIPE)
    p2 = subprocess.Popen([cpio, "-idv"], stdin = p1.stdout, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
    f = open("%s/etc/%s-release" % (tmpdir, distro), "r")
    shutil.rmtree(tmpdir, ignore_errors = True)
    return content.split(" ")[2]
def get_kickstarts_from_repos(repometadata):
    """Discover kickstart files advertised by repos: read each repo's
    repomd.xml for an "image-config" entry, download and parse that
    config, then fetch every referenced kickstart; returns a list of
    {"filename": ..., "description": ...} dicts.
    NOTE(review): try/except wrappers, `continue`s, the `suffix`
    assignment, and the return are elided from this chunk.
    """
    for repo in repometadata:
        root = xmlparse(repo["repomd"])
            raise CreatorError("repomd.xml syntax error.")
        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "image-config":
        # Repo advertises no image-config: nothing to collect here.
        if elm.attrib["type"] != "image-config":
        location = elm.find("%slocation" % ns)
        image_config = str(repo["baseurl"] + "/" + location.attrib["href"])
        filename = str("%s/%s/image-config.xml%s" % (repo["cachedir"], repo["name"], suffix))
        image_config = get_uncompressed_data_from_url(image_config,filename,repo["proxies"])
        root = xmlparse(image_config)
            raise CreatorError("image-config.xml syntax error.")
        for elm in root.getiterator("config"):
            path = elm.find("path").text
            # Normalize a legacy directory name in recorded paths.
            path = path.replace("images-config", "image-config")
            description = elm.find("description").text
            makedirs(os.path.dirname("%s/%s/%s" % (repo["cachedir"], repo["name"], path)))
            # Relative paths are resolved against the repo baseurl.
            if "http" not in path:
                url = str(repo["baseurl"] + "/" + path)
                filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], path))
                path = myurlgrab(url, filename, repo["proxies"])
            kickstarts.append({"filename":path,"description":description})
def select_ks(ksfiles):
    """Interactively prompt the user to pick one of `ksfiles` and return
    the chosen entry's filename; "q" quits.
    NOTE(review): the enumeration loop headers, quit handling, and the
    int() conversion of `choice` are elided from this chunk.
    """
    msger.info("Available kickstart files:")
        msger.raw("\t%d. %s (%s)" % (i, ks["description"], os.path.basename(ks["filename"])))
        choice = raw_input("Please input your choice and press ENTER. [1..%d] ? " % i)
        if choice.lower() == "q":
        # Only accept an in-range selection; choices are 1-based.
        if choice >= 1 and choice <= i:
    return ksfiles[choice-1]["filename"]
def get_pkglist_in_patterns(group, patterns):
    """Return the list of package names required by pattern `group`
    (matched by name or summary) in a patterns XML file.
    NOTE(review): initialization, try/except around xmlparse, the `ns`
    assignment, and the return are elided from this chunk.
    """
    root = xmlparse(patterns)
        raise SyntaxError("%s syntax error." % patterns)
    for elm in list(root.getroot()):
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        # A pattern matches when either its name or summary equals group.
        if name.text == group or summary.text == group:
        for requires in list(elm):
            if requires.tag.endswith("requires"):
        # Collect each required package name exactly once.
        for pkg in list(requires):
            pkgname = pkg.attrib["name"]
            if pkgname not in pkglist:
                pkglist.append(pkgname)
def get_pkglist_in_comps(group, comps):
    """Return the list of package names in comps group `group` (matched
    by id or name) from a comps XML file.
    NOTE(review): initialization, try/except around xmlparse, the `id`
    lookup, breaks, and the return are elided from this chunk.
    """
    root = xmlparse(comps)
        raise SyntaxError("%s syntax error." % comps)
    for elm in root.getiterator("group"):
        name = elm.find("name")
        # A group matches when either its id or its name equals group.
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
    # Collect each <packagereq> entry exactly once.
    for require in elm.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
            if pkgname not in pkglist:
                pkglist.append(pkgname)
def is_statically_linked(binary):
    """Return whether file(1) describes `binary` as statically linked.
    NOTE(review): the boolean return statements are elided from this
    chunk.
    """
    dev_null = os.open("/dev/null", os.O_WRONLY)
    filecmd = find_binary_path("file")
    args = [ filecmd, binary ]
    file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
    output = file.communicate()[0]
    # file(1) includes ", statically linked, " in its description line.
    if output.find(", statically linked, ") > 0:
def setup_qemu_emulator(rootdir, arch):
    """Install a statically-linked qemu-arm into `rootdir` and register
    it with the kernel's binfmt_misc so ARM binaries inside the chroot
    can execute on the build host.
    NOTE(review): fd.close() calls and the return of the emulator path
    are elided from this chunk.
    """
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        subprocess.call([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        subprocess.call([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")
    # Copy the emulator into the chroot at the same absolute path the
    # binfmt_misc registration will reference.
    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        subprocess.call(["/usr/sbin/setenforce", "0"])

    node = "/proc/sys/fs/binfmt_misc/arm"
    if is_statically_linked(qemu_emulator) and os.path.exists(node):
    # unregister it if it has been registered and is a dynamically-linked executable
    if not is_statically_linked(qemu_emulator) and os.path.exists(node):
        # Writing "-1" to a binfmt_misc entry removes that registration.
        qemu_unregister_string = "-1\n"
        fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
        fd.write(qemu_unregister_string)

    # register qemu emulator for interpreting other arch executable file
    if not os.path.exists(node):
        # ELF magic + mask for 32-bit ARM, mapped to the emulator path.
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
        fd.write(qemu_arm_string)
def create_release(config, destdir, name, outimages, release):
    """ TODO: This functionality should really be in creator.py inside the
    ImageCreator class. """
    # Flattens VM-image subdirs into destdir, snapshots the kickstart,
    # renames iso/usbimg outputs to .img, and writes a MANIFEST with
    # md5sums.  NOTE(review): many lines are elided in this chunk (the
    # `file` binding in the first loop, fd reads/closes/writes, listdir
    # loop headers, updated_list init, and the final return).
    # For virtual machine images, we have a subdir for it, this is unnecessary
    for i in range(len(outimages)):
        if not os.path.isdir(file) and os.path.dirname(file) != destdir:
            thatsubdir = os.path.dirname(file)
            newfile = os.path.join(destdir, os.path.basename(file))
            shutil.move(file, newfile)
            outimages[i] = newfile
    shutil.rmtree(thatsubdir, ignore_errors = True)

    """ Create release directory and files """
    # NOTE(review): os.system with %-interpolated paths is shell-unsafe
    # if destdir/name ever contain metacharacters; acceptable only for
    # trusted internal paths.
    os.system ("cp %s %s/%s.ks" % (config, destdir, name))
    # When building a release we want to make sure the .ks
    # file generates the same build even when --release= is not used.
    fd = open(config, "r")
    kscont = kscont.replace("@BUILD_ID@",release)
    fd = open("%s/%s.ks" % (destdir,name), "w")
    outimages.append("%s/%s.ks" % (destdir,name))

    # Using system + mv, because of * in filename.
    os.system ("mv %s/*-pkgs.txt %s/%s.packages" % (destdir, destdir, name))
    outimages.append("%s/%s.packages" % (destdir,name))

    # Normalize bootable image extensions to .img for release layout.
    d = os.listdir(destdir)
        if f.endswith(".iso"):
            ff = f.replace(".iso", ".img")
            os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
            outimages.append("%s/%s" %(destdir, ff))
        elif f.endswith(".usbimg"):
            ff = f.replace(".usbimg", ".img")
            os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
            outimages.append("%s/%s" %(destdir, ff))

    # Write a MANIFEST of "md5sum  filename" lines for every release file.
    fd = open(destdir + "/MANIFEST", "w")
    d = os.listdir(destdir)
        if os.path.exists("/usr/bin/md5sum"):
            p = subprocess.Popen(["/usr/bin/md5sum", "-b", "%s/%s" %(destdir, f )],
                                 stdout=subprocess.PIPE)
            (md5sum, errorstr) = p.communicate()
            if p.returncode != 0:
                msger.warning("Can't generate md5sum for image %s/%s" %(destdir, f ))
            # md5sum output is "<digest> *<file>"; keep only the digest.
            md5sum = md5sum.split(" ")[0]
            fd.write(md5sum+" "+f+"\n")

    outimages.append("%s/MANIFEST" % destdir)

    """ Update the file list. """
    # Keep only files that still exist after the moves/renames above.
    for file in outimages:
        if os.path.exists("%s" % file):
            updated_list.append(file)
def get_local_distro():
    """Log the build host's distribution info (/etc/*-release and
    /etc/issue) and kernel version.
    NOTE(review): the file-read and msger output lines inside both
    branches are elided from this chunk.
    """
    msger.info("Local linux distribution:")
    for file in glob.glob("/etc/*-release"):
    if os.path.exists("/etc/issue"):
        fd = open("/etc/issue", "r")
    msger.info("Local Kernel version: " + os.uname()[2])
def check_mic_installation(argv):
    """Raise CreatorError when the running tool is installed in both
    /usr/local/bin and /usr/bin (a duplicated mic2 installation).

    `argv` is sys.argv-style; only argv[0] (the invoked binary) is used.
    """
    creator_name = os.path.basename(argv[0])
    local_copy = "/usr/local/bin/" + creator_name
    system_copy = "/usr/bin/" + creator_name
    if os.path.exists(local_copy) and os.path.exists(system_copy):
        raise CreatorError("There are two mic2 installations existing, this will result in some unpredictable errors, the reason is installation path of mic2 binary is different from installation path of mic2 source on debian-based distros, please remove one of them to ensure it can work normally.")
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    """Download the source rpms for `pkgs` into <instroot>/usr/src/SRPMS,
    serving from the local cache when a matching src.rpm is already
    there.
    NOTE(review): try/except blocks, several loop/guard headers,
    accumulator initializations, and counters are elided in this chunk.
    """
    def get_source_repometadata(repometadata):
        # Only repos named "*-source" carry src.rpm packages.
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        return src_repometadata

    def get_src_name(srpm):
        # "<name>-<ver>-<rel>.src.rpm" -> groups; name is group 1.
        # NOTE(review): the return of the matched group is elided.
        m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)

    src_repometadata = get_source_repometadata(repometadata)
    if not src_repometadata:
        msger.warning("No source repo found")

    # Index src.rpms already present in the cache, keyed by source name.
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)
    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    destdir = instroot+'/usr/src/SRPMS'
    if not os.path.exists(destdir):
        # NOTE(review): the makedirs call and the loop over pkgs that
        # produces `_pkg` are elided around these two lines.
        srcpkg_name = get_source_name(_pkg, repometadata)
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
            # Cached copy exists: reuse it.
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
            # Otherwise download the src.rpm from the source repos.
            src_pkg = get_package(pkg, src_repometadata, 'src')
            shutil.copy(src_pkg, destdir)
            src_pkgs.append(src_pkg)
    msger.info("%d source packages gotten from cache" %cached_count)