2 # misc.py : miscellaneous utilities
4 # Copyright 2010, Intel Inc.
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; version 2 of the License.
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU General Public License for more details.
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
28 import xml.dom.minidom
35 import sqlite3 as sqlite
41 from xml.etree import cElementTree
44 xmlparse = cElementTree.parse
47 from fs_related import *
54 locale.setlocale(locale.LC_ALL,'')
56 os.environ['LC_ALL'] = 'C'
57 locale.setlocale(locale.LC_ALL,'C')
58 sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
59 sys.stdout.errors = 'replace'
61 def get_extension_name(path):
62 match = re.search("(?<=\.)\w+$", path)
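# Illustrative usage (an assumption about the elided return: the matched
# extension on success, None otherwise):
#   get_extension_name("/path/to/image.vmdk")  # -> "vmdk"
#   get_extension_name("Makefile")             # -> None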
68 def get_image_type(path):
69 if os.path.isdir(path):
73 maptab = {"raw":"raw", "vmdk":"vmdk", "vdi":"vdi", "iso":"livecd", "usbimg":"liveusb"}
74 extension = get_extension_name(path)
75 if extension in ("raw", "vmdk", "vdi", "iso", "usbimg"):
76 return maptab[extension]
79 file_header = fd.read(1024)
81 vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
82 if file_header[0:len(vdi_flag)] == vdi_flag:
85 dev_null = os.open("/dev/null", os.O_WRONLY)
86 filecmd = find_binary_path("file")
87 args = [ filecmd, path ]
88 file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
89 output = file.communicate()[0]
91 isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
92 usbimgptn = re.compile(r".*x86 boot sector.*active.*")
93 rawptn = re.compile(r".*x86 boot sector.*")
94 vmdkptn = re.compile(r".*VMware. disk image.*")
95 ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
96 if isoptn.match(output):
98 elif usbimgptn.match(output):
99 return maptab["usbimg"]
100 elif rawptn.match(output):
102 elif vmdkptn.match(output):
103 return maptab["vmdk"]
104 elif ext3fsimgptn.match(output):
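# Summary of the `file`-output heuristics above (illustrative outputs):
#   "... ISO 9660 CD-ROM filesystem ... (bootable) ..." -> livecd
#   "... x86 boot sector ... active ..."                -> liveusb
#   "... x86 boot sector ..."                           -> raw
#   "... VMware4 disk image ..."                        -> vmdk
#   "... Linux ... ext3 filesystem data ..."            -> ext3 fs image (branch body elided)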
109 def get_file_size(file):
110 """Return size in MB unit"""
111 du = find_binary_path("du")
112 dev_null = os.open("/dev/null", os.O_WRONLY)
113 duProc = subprocess.Popen([du, "-s", "-b", "-B", "1M", file],
114 stdout=subprocess.PIPE, stderr=dev_null)
115 duOutput = duProc.communicate()[0]
116 if duProc.returncode:
117 raise CreatorError("Failed to run %s" % du)
119 size1 = int(duOutput.split()[0])
120 duProc = subprocess.Popen([du, "-s", "-B", "1M", file],
121 stdout=subprocess.PIPE, stderr=dev_null)
122 duOutput = duProc.communicate()[0]
123 if duProc.returncode:
124 raise CreatorError("Failed to run %s" % du)
126 size2 = int(duOutput.split()[0])
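# Note: the first du run (-b, apparent size) and the second (actual disk
# usage) can differ; the elided tail presumably returns the larger of
# size1 and size2.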
133 def get_filesystem_avail(fs):
134 vfstat = os.statvfs(fs)
135 return vfstat.f_bavail * vfstat.f_bsize
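# Illustrative usage:
#   get_filesystem_avail("/var/tmp")  # -> free bytes, e.g. 1073741824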
137 def convert_image(srcimg, srcfmt, dstimg, dstfmt):
140 raise CreatorError("Invalid destination image format: %s" % dstfmt)
141 logging.debug("converting %s image to %s" % (srcimg, dstimg))
143 path = find_binary_path("qemu-img")
144 argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
145 elif srcfmt == "vdi":
146 path = find_binary_path("VBoxManage")
147 argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
149 raise CreatorError("Invalid source image format: %s" % srcfmt)
151 rc = subprocess.call(argv)
153 logging.debug("convert successful")
155 raise CreatorError("Unable to convert disk to %s" % dstfmt)
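# The two branches above are equivalent to running (illustrative):
#   qemu-img convert -f vmdk src.vmdk -O raw dst.raw
#   VBoxManage internalcommands converttoraw src.vdi dst.raw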
157 def myxcopytree(src, dst):
158 dev_null = os.open("/dev/null", os.O_WRONLY)
159 dirnames = os.listdir(src)
160 copycmd = find_binary_path("cp")
162 args = [ copycmd, "-af", src + "/" + dir, dst ]
163 subprocess.call(args, stdout=dev_null, stderr=dev_null)
165 ignores = ["dev/fd", "dev/stdin", "dev/stdout", "dev/stderr", "etc/mtab"]
166 for exclude in ignores:
167 if os.path.exists(dst + "/" + exclude):
168 os.unlink(dst + "/" + exclude)
170 def uncompress_squashfs(squashfsimg, outdir):
171 """Uncompress file system from squshfs image"""
172 unsquashfs = find_binary_path("unsquashfs")
173 args = [ unsquashfs, "-d", outdir, squashfsimg ]
174 rc = subprocess.call(args)
176 raise SquashfsError("Failed to uncompress %s." % squashfsimg)
178 def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
180 return tempfile.mkdtemp(dir = dir, prefix = prefix)
182 def ismeego(rootdir):
184 if (os.path.exists(rootdir + "/etc/moblin-release") \
185 or os.path.exists(rootdir + "/etc/meego-release")) \
186 and os.path.exists(rootdir + "/etc/inittab") \
187 and os.path.exists(rootdir + "/etc/rc.sysinit") \
188 and glob.glob(rootdir + "/boot/vmlinuz-*"):
194 def is_meego_bootstrap(rootdir):
196 if (os.path.exists(rootdir + "/etc/moblin-release") \
197 or os.path.exists(rootdir + "/etc/meego-release")) \
198 and os.path.exists(rootdir + "/usr/bin/python") \
199 and os.path.exists(rootdir + "/usr/bin/mic-image-creator"):
207 _my_noproxy_list = []
209 def set_proxy_environ():
210 global _my_noproxy, _my_proxies
213 for key in _my_proxies.keys():
214 os.environ[key + "_proxy"] = _my_proxies[key]
217 os.environ["no_proxy"] = _my_noproxy
219 def unset_proxy_environ():
220 if "http_proxy" in os.environ:
221 del os.environ["http_proxy"]
222 if "https_proxy" in os.environ:
223 del os.environ["https_proxy"]
224 if "ftp_proxy" in os.environ:
225 del os.environ["ftp_proxy"]
226 if "all_proxy" in os.environ:
227 del os.environ["all_proxy"]
228 if "no_proxy" in os.environ:
229 del os.environ["no_proxy"]
230 if "HTTP_PROXY" in os.environ:
231 del os.environ["HTTP_PROXY"]
232 if "HTTPS_PROXY" in os.environ:
233 del os.environ["HTTPS_PROXY"]
234 if "FTP_PROXY" in os.environ:
235 del os.environ["FTP_PROXY"]
236 if "ALL_PROXY" in os.environ:
237 del os.environ["ALL_PROXY"]
238 if "NO_PROXY" in os.environ:
239 del os.environ["NO_PROXY"]
241 def _set_proxies(proxy = None, no_proxy = None):
242 """Return a dictionary of scheme -> proxy server URL mappings."""
243 global _my_noproxy, _my_proxies
248 proxies.append(("http_proxy", proxy))
250 proxies.append(("no_proxy", no_proxy))
252 """Get proxy settings from environment variables if not provided"""
253 if not proxy and not no_proxy:
254 proxies = os.environ.items()
256 """ Remove proxy env variables, urllib2 can't handle them correctly """
257 unset_proxy_environ()
259 for name, value in proxies:
261 if value and name[-6:] == '_proxy':
262 if name[0:2] != "no":
263 _my_proxies[name[:-6]] = value
270 for dec in ip.split("."):
271 ipint |= int(dec) << shift
281 ipaddr = ".%d%s" % (dec, ipaddr)
286 if host.replace(".", "").isdigit():
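# Illustrative behaviour of the address helpers above (assuming the
# elided shift/return logic):
#   ip_to_int("10.0.0.1") -> 167772161
#   isip("10.0.0.1")      -> True;   isip("repo.example.com") -> False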
290 def set_noproxy_list():
291 global _my_noproxy, _my_noproxy_list
292 _my_noproxy_list = []
295 for item in _my_noproxy.split(","):
299 if item[0] != '.' and item.find("/") == -1:
300 """ Need to match it """
301 _my_noproxy_list.append({"match":0,"needle":item})
303 """ Need to match at tail """
304 _my_noproxy_list.append({"match":1,"needle":item})
305 elif item.find("/") > 3:
306 """ IP/MASK, need to match at head """
307 needle = item[0:item.find("/")].strip()
308 ip = ip_to_int(needle)
310 mask = item[item.find("/")+1:].strip()
314 netmask = ~((1<<(32-netmask)) - 1)
319 for dec in mask.split("."):
320 netmask |= int(dec) << shift
323 _my_noproxy_list.append({"match":2,"needle":ip,"netmask":netmask})
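# Example no_proxy value and how each entry is classified above
# (example.com is an illustrative placeholder):
#   no_proxy = "localhost,.example.com,10.0.0.0/8"
#     "localhost"    -> {"match": 0, ...}  exact host match
#     ".example.com" -> {"match": 1, ...}  suffix match
#     "10.0.0.0/8"   -> {"match": 2, ...}  network (IP/mask) match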
326 (scheme, host, path, parm, query, frag) = urlparse.urlparse(url)
328 user_pass, host = host.split('@', 1)
330 host, port = host.split(':', 1)
331 hostisip = isip(host)
332 for item in _my_noproxy_list:
333 if hostisip and item["match"] <= 1:
335 if item["match"] == 2 and hostisip:
336 if (ip_to_int(host) & item["netmask"]) == item["needle"]:
338 if item["match"] == 0:
339 if host == item["needle"]:
341 if item["match"] == 1:
342 if host.rfind(item["needle"]) > 0:
346 def set_proxies(proxy = None, no_proxy = None):
347 _set_proxies(proxy, no_proxy)
351 if url[0:4] == "file" or isnoproxy(url):
353 type = url[0:url.index(":")]
355 if type in _my_proxies:
356 proxy = _my_proxies[type]
357 elif "http" in _my_proxies:
358 proxy = _my_proxies["http"]
363 def remap_repostr(repostr, siteconf):
364 items = repostr.split(",")
368 subitems = item.split(":")
369 if subitems[0] == "name":
371 if subitems[0] == "baseurl":
376 for section in siteconf._sections:
377 if section != "main":
378 if not siteconf.has_option(section, "enabled") or siteconf.get(section, "enabled") == "0":
380 if siteconf.has_option(section, "equalto"):
381 equalto = siteconf.get(section, "equalto")
382 if (name and equalto == name) or (baseurl and equalto == baseurl):
383 remap_baseurl = siteconf.get(section, "baseurl")
384 repostr = repostr.replace(baseurl, remap_baseurl)
390 def get_temp_reponame(baseurl):
391 md5obj = hashlib.md5(baseurl)
392 tmpreponame = "%s" % md5obj.hexdigest()
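# Illustrative (assuming the elided line returns tmpreponame):
#   get_temp_reponame("http://repo.example.com/") -> 32-char md5 hex digest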
395 def get_repostr(repo, siteconf = None):
397 repo = remap_repostr(repo, siteconf)
398 keys = ("baseurl", "mirrorlist", "name", "cost", "includepkgs", "excludepkgs", "proxy", "save", "proxyuser", "proxypasswd", "debuginfo", "source", "gpgkey")
400 items = repo.split(",")
402 subitems = items[0].split(":")
403 if len(subitems) == 1:
405 repostr += " --baseurl=%s" % url
406 elif subitems[0] == "baseurl":
408 repostr += " --baseurl=%s" % url
409 elif subitems[0] in ("http", "ftp", "https", "ftps", "file"):
411 repostr += " --baseurl=%s" % url
413 raise ValueError("Invalid repo string")
414 if url.find("://") == -1 \
415 or url[0:url.index("://")] not in ("http", "ftp", "https", "ftps", "file") \
416 or url.find("/", url.index("://")+3) == -1:
417 raise ValueError("Invalid repo string")
419 if repo.find("baseurl:") == -1 and repo.find("mirrorlist:") == -1:
420 raise ValueError("Invalid repo string")
425 subitems = item.split(":")
426 if subitems[0] in keys:
427 if subitems[0] in ("baseurl", "mirrorlist"):
428 url = item[len(subitems[0])+1:]
429 if subitems[0] in ("save", "debuginfo", "source"):
430 repostr += " --%s" % subitems[0]
431 elif subitems[0] in ("includepkgs", "excludepkgs"):
432 repostr += " --%s=%s" % (subitems[0], item[len(subitems[0])+1:].replace(";", ","))
434 repostr += " --%s=%s" % (subitems[0], item[len(subitems[0])+1:])
436 raise ValueError("Invalid repo string")
437 if url.find("://") != -1 \
438 and url[0:url.index("://")] in ("http", "ftp", "https", "ftps", "file") \
439 and url.find("/", url.index("://")+3) != -1:
440 if repostr.find("--proxy=") == -1:
441 proxy = get_proxy(url)
443 repostr += " --proxy=%s" % proxy
445 raise ValueError("Invalid repo string")
447 if repostr.find("--name=") == -1:
448 repostr += " --name=%s" % get_temp_reponame(url)
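# Illustrative round trip (assuming the elided initialization starts
# repostr as "repo"; a --proxy option may also be appended):
#   get_repostr("baseurl:http://repo.example.com/meego/,name:core,save:")
#   -> 'repo --baseurl=http://repo.example.com/meego/ --name=core --save'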
452 DEFAULT_SITECONF_GLOBAL="/etc/mic2/mic2.conf"
453 DEFAULT_SITECONF_USER="~/.mic2.conf"
455 def read_siteconf(siteconf = None):
456 from ConfigParser import SafeConfigParser
458 my_siteconf_parser = SafeConfigParser()
460 global_siteconf = DEFAULT_SITECONF_GLOBAL
461 if os.path.isfile(global_siteconf):
462 my_siteconf_parser.read(global_siteconf)
464 local_siteconf = os.path.expanduser(DEFAULT_SITECONF_USER)
465 if os.path.isfile(local_siteconf):
466 my_siteconf_parser.read(local_siteconf)
468 my_siteconf_parser.read(siteconf)
470 if not my_siteconf_parser.sections():
473 return my_siteconf_parser
475 def output_siteconf(siteconf):
480 for section in siteconf.sections():
481 output += "[%s]\n" % section
482 for option in siteconf.options(section):
483 output += "%s=%s\n" % (option, siteconf.get(section, option))
489 def get_repostrs_from_ks(ks):
491 for repodata in ks.handler.repo.repoList:
493 if hasattr(repodata, "name") and repodata.name:
494 repostr += ",name:" + repodata.name
495 if hasattr(repodata, "baseurl") and repodata.baseurl:
496 repostr += ",baseurl:" + repodata.baseurl
497 if hasattr(repodata, "mirrorlist") and repodata.mirrorlist:
498 repostr += ",mirrorlist:" + repodata.mirrorlist
499 if hasattr(repodata, "includepkgs") and repodata.includepkgs:
500 repostr += ",includepkgs:" + ";".join(repodata.includepkgs)
501 if hasattr(repodata, "excludepkgs") and repodata.excludepkgs:
502 repostr += ",excludepkgs:" + ";".join(repodata.excludepkgs)
503 if hasattr(repodata, "cost") and repodata.cost:
504 repostr += ",cost:%d" % repodata.cost
505 if hasattr(repodata, "save") and repodata.save:
507 if hasattr(repodata, "proxy") and repodata.proxy:
508 repostr += ",proxy:" + repodata.proxy
509 if hasattr(repodata, "proxyuser") and repodata.proxy_username:
510 repostr += ",proxyuser:" + repodata.proxy_username
511 if hasattr(repodata, "proxypasswd") and repodata.proxy_password:
512 repostr += ",proxypasswd:" + repodata.proxy_password
513 if repostr.find("name:") == -1:
514 repostr += ",name:%s" % get_temp_reponame(repodata.baseurl)
515 if hasattr(repodata, "debuginfo") and repodata.debuginfo:
516 repostr += ",debuginfo:"
517 if hasattr(repodata, "source") and repodata.source:
518 repostr += ",source:"
519 if hasattr(repodata, "gpgkey") and repodata.gpgkey:
520 repostr += ",gpgkey:" + repodata.gpgkey
521 kickstart_repos.append(repostr[1:])
522 return kickstart_repos
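# Illustrative: a kickstart line such as
#   repo --name=core --baseurl=http://repo.example.com/ --save
# yields the comma-separated string
#   "name:core,baseurl:http://repo.example.com/,save:"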
524 def get_repostrs_from_siteconf(siteconf):
529 for section in siteconf._sections:
530 if section != "main":
532 if siteconf.has_option(section, "enabled") \
533 and siteconf.get(section, "enabled") == "1" \
534 and (not siteconf.has_option(section, "equalto") or not siteconf.get(section, "equalto")):
535 if siteconf.has_option(section, "name") and siteconf.get(section, "name"):
536 repostr += ",name:%s" % siteconf.get(section, "name")
537 if siteconf.has_option(section, "baseurl") and siteconf.get(section, "baseurl"):
538 repostr += ",baseurl:%s" % siteconf.get(section, "baseurl")
539 if siteconf.has_option(section, "mirrorlist") and siteconf.get(section, "mirrorlist"):
540 repostr += ",mirrorlist:%s" % siteconf.get(section, "mirrorlist")
541 if siteconf.has_option(section, "includepkgs") and siteconf.get(section, "includepkgs"):
542 repostr += ",includepkgs:%s" % siteconf.get(section, "includepkgs").replace(",", ";")
543 if siteconf.has_option(section, "excludepkgs") and siteconf.get(section, "excludepkgs"):
544 repostr += ",excludepkgs:%s" % siteconf.get(section, "excludepkgs").replace(",", ";")
545 if siteconf.has_option(section, "cost") and siteconf.get(section, "cost"):
546 repostr += ",cost:%s" % siteconf.get(section, "cost")
547 if siteconf.has_option(section, "save") and siteconf.get(section, "save"):
549 if siteconf.has_option(section, "proxy") and siteconf.get(section, "proxy"):
550 repostr += ",proxy:%s" % siteconf.get(section, "proxy")
551 if siteconf.has_option(section, "proxy_username") and siteconf.get(section, "proxy_username"):
552 repostr += ",proxyuser:%s" % siteconf.get(section, "proxy_username")
553 if siteconf.has_option(section, "proxy_password") and siteconf.get(section, "proxy_password"):
554 repostr += ",proxypasswd:%s" % siteconf.get(section, "proxy_password")
556 if repostr.find("name:") == -1:
557 repostr += ",name:%s" % get_temp_reponame(siteconf.get(section, "baseurl"))  # get_temp_reponame() requires a baseurl
558 site_repos.append(repostr[1:])
561 def get_uncompressed_data_from_url(url, filename, proxies):
562 filename = myurlgrab(url, filename, proxies)
564 if filename.endswith(".gz"):
566 gunzip = find_binary_path('gunzip')
567 subprocess.call([gunzip, "-f", filename])
568 elif filename.endswith(".bz2"):
570 bunzip2 = find_binary_path('bunzip2')
571 subprocess.call([bunzip2, "-f", filename])
573 filename = filename.replace(suffix, "")
576 def get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename):
577 url = str(baseurl + "/" + filename)
578 filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
579 return get_uncompressed_data_from_url(url, filename_tmp, proxies)
581 def get_metadata_from_repos(repostrs, cachedir):
583 raise CreatorError("No cache dir defined.")
585 my_repo_metadata = []
586 for repostr in repostrs:
590 items = repostr.split(",")
592 subitems = item.split(":")
593 if subitems[0] == "name":
594 reponame = subitems[1]
595 if subitems[0] == "baseurl":
597 if subitems[0] == "proxy":
599 if subitems[0] in ("http", "https", "ftp", "ftps", "file"):
602 proxy = get_proxy(baseurl)
605 proxies = {str(proxy.split(":")[0]):str(proxy)}
606 makedirs(cachedir + "/" + reponame)
607 url = str(baseurl + "/repodata/repomd.xml")
608 filename = str("%s/%s/repomd.xml" % (cachedir, reponame))
609 repomd = myurlgrab(url, filename, proxies)
611 root = xmlparse(repomd)
613 raise CreatorError("repomd.xml syntax error.")
615 ns = root.getroot().tag
616 ns = ns[0:ns.rindex("}")+1]
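# ElementTree reports tags in Clark notation, e.g.
# "{http://linux.duke.edu/metadata/repo}repomd"; ns keeps the "{...}"
# prefix so the lookups below can be written as "%sdata" % ns, etc.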
619 for elm in root.getiterator("%sdata" % ns):
620 if elm.attrib["type"] == "patterns":
621 patterns = elm.find("%slocation" % ns).attrib['href']
625 for elm in root.getiterator("%sdata" % ns):
626 if elm.attrib["type"] == "group_gz":
627 comps = elm.find("%slocation" % ns).attrib['href']
630 for elm in root.getiterator("%sdata" % ns):
631 if elm.attrib["type"] == "group":
632 comps = elm.find("%slocation" % ns).attrib['href']
636 for elm in root.getiterator("%sdata" % ns):
637 if elm.attrib["type"] == "primary_db":
638 primary_type = ".sqlite"
642 for elm in root.getiterator("%sdata" % ns):
643 if elm.attrib["type"] == "primary":
650 primary = elm.find("%slocation" % ns).attrib['href']
651 primary = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, primary)
654 patterns = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, patterns)
657 comps = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, comps)
661 repokey = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, "repodata/repomd.xml.key")
664 print "Warning: can't get %s/%s" % (baseurl, "repodata/repomd.xml.key")
666 my_repo_metadata.append({"name":reponame, "baseurl":baseurl, "repomd":repomd, "primary":primary, "cachedir":cachedir, "proxies":proxies, "patterns":patterns, "comps":comps, "repokey":repokey})
667 return my_repo_metadata
669 def get_arch(repometadata):
671 for repo in repometadata:
672 if repo["primary"].endswith(".xml"):
673 root = xmlparse(repo["primary"])
674 ns = root.getroot().tag
675 ns = ns[0:ns.rindex("}")+1]
676 for elm in root.getiterator("%spackage" % ns):
677 if elm.find("%sarch" % ns).text not in ("noarch", "src"):
678 arch = elm.find("%sarch" % ns).text
679 if arch not in archlist:
680 archlist.append(arch)
681 elif repo["primary"].endswith(".sqlite"):
682 con = sqlite.connect(repo["primary"])
683 for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
684 if row[0] not in archlist:
685 archlist.append(row[0])
691 def get_package(pkg, repometadata, arch = None):
694 for repo in repometadata:
695 if repo["primary"].endswith(".xml"):
696 root = xmlparse(repo["primary"])
697 ns = root.getroot().tag
698 ns = ns[0:ns.rindex("}")+1]
699 for elm in root.getiterator("%spackage" % ns):
700 if elm.find("%sname" % ns).text == pkg:
701 if elm.find("%sarch" % ns).text != "src":
702 version = elm.find("%sversion" % ns)
703 tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
706 location = elm.find("%slocation" % ns)
707 pkgpath = "%s" % location.attrib['href']
710 if repo["primary"].endswith(".sqlite"):
711 con = sqlite.connect(repo["primary"])
713 for row in con.execute("select version, release,location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
714 tmpver = "%s-%s" % (row[0], row[1])
716 pkgpath = "%s" % row[2]
720 for row in con.execute("select version, release,location_href from packages where name = \"%s\"" % pkg):
721 tmpver = "%s-%s" % (row[0], row[1])
723 pkgpath = "%s" % row[2]
728 makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
729 url = str(target_repo["baseurl"] + "/" + pkgpath)
730 filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
731 pkg = myurlgrab(url, filename, target_repo["proxies"])
736 def get_source_name(pkg, repometadata):
738 def get_bin_name(pkg):
739 m = re.match("(.*)-(.*)-(.*)\.(.*)\.rpm", pkg)
744 def get_src_name(srpm):
745 m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
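# Illustrative behaviour of the two helpers (assuming the elided returns
# rebuild the matched fields):
#   get_bin_name("bash-4.1-3.1.i586.rpm") -> "bash"
#   get_src_name("bash-4.1-3.1.src.rpm")  -> "bash-4.1-3.1"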
753 pkg_name = get_bin_name(pkg)
757 for repo in repometadata:
758 if repo["primary"].endswith(".xml"):
759 root = xmlparse(repo["primary"])
760 ns = root.getroot().tag
761 ns = ns[0:ns.rindex("}")+1]
762 for elm in root.getiterator("%spackage" % ns):
763 if elm.find("%sname" % ns).text == pkg_name:
764 if elm.find("%sarch" % ns).text != "src":
765 version = elm.find("%sversion" % ns)
766 tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
769 fmt = elm.find("%sformat" % ns)
771 fns = fmt.getchildren()[0].tag
772 fns = fns[0:fns.rindex("}")+1]
773 pkgpath = fmt.find("%ssourcerpm" % fns).text
777 if repo["primary"].endswith(".sqlite"):
778 con = sqlite.connect(repo["primary"])
779 for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
780 tmpver = "%s-%s" % (row[0], row[1])
782 pkgpath = "%s" % row[2]
787 return get_src_name(pkgpath)
791 def get_release_no(repometadata, distro="meego"):
792 cpio = find_binary_path("cpio")
793 rpm2cpio = find_binary_path("rpm2cpio")
794 release_pkg = get_package("%s-release" % distro, repometadata)
799 p1 = subprocess.Popen([rpm2cpio, release_pkg], stdout = subprocess.PIPE)
800 p2 = subprocess.Popen([cpio, "-idv"], stdin = p1.stdout, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
802 f = open("%s/etc/%s-release" % (tmpdir, distro), "r")
806 shutil.rmtree(tmpdir, ignore_errors = True)
807 return content.split(" ")[2]
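# Illustrative: with /etc/meego-release containing
#   "MeeGo release 1.1 (MeeGo)"
# the third whitespace-separated field, "1.1", is returned.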
811 def get_kickstarts_from_repos(repometadata):
813 for repo in repometadata:
815 root = xmlparse(repo["repomd"])
817 raise CreatorError("repomd.xml syntax error.")
819 ns = root.getroot().tag
820 ns = ns[0:ns.rindex("}")+1]
822 for elm in root.getiterator("%sdata" % ns):
823 if elm.attrib["type"] == "image-config":
826 if elm.attrib["type"] != "image-config":
829 location = elm.find("%slocation" % ns)
830 image_config = str(repo["baseurl"] + "/" + location.attrib["href"])
831 filename = str("%s/%s/image-config.xml%s" % (repo["cachedir"], repo["name"], suffix))
833 image_config = get_uncompressed_data_from_url(image_config, filename, repo["proxies"])
836 root = xmlparse(image_config)
838 raise CreatorError("image-config.xml syntax error.")
840 for elm in root.getiterator("config"):
841 path = elm.find("path").text
842 path = path.replace("images-config", "image-config")
843 description = elm.find("description").text
844 makedirs(os.path.dirname("%s/%s/%s" % (repo["cachedir"], repo["name"], path)))
846 if "http" not in path:
847 url = str(repo["baseurl"] + "/" + path)
848 filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], path))
849 path = myurlgrab(url, filename, repo["proxies"])
850 kickstarts.append({"filename":path,"description":description})
853 def select_ks(ksfiles):
854 print "Available kickstart files:"
858 print "\t%d. %s (%s)" % (i, ks["description"], os.path.basename(ks["filename"]))
860 choice = raw_input("Please input your choice and press ENTER. [1..%d] ? " % i)
861 if choice.lower() == "q":
865 if choice >= 1 and choice <= i:
868 return ksfiles[choice-1]["filename"]
871 def get_pkglist_in_patterns(group, patterns):
875 root = xmlparse(patterns)
877 raise SyntaxError("%s syntax error." % patterns)
879 for elm in list(root.getroot()):
881 ns = ns[0:ns.rindex("}")+1]
882 name = elm.find("%sname" % ns)
883 summary = elm.find("%ssummary" % ns)
884 if name.text == group or summary.text == group:
892 for requires in list(elm):
893 if requires.tag.endswith("requires"):
900 for pkg in list(requires):
901 pkgname = pkg.attrib["name"]
902 if pkgname not in pkglist:
903 pkglist.append(pkgname)
907 def get_pkglist_in_comps(group, comps):
911 root = xmlparse(comps)
913 raise SyntaxError("%s syntax error." % comps)
915 for elm in root.getiterator("group"):
917 name = elm.find("name")
918 if id.text == group or name.text == group:
919 packagelist = elm.find("packagelist")
926 for require in elm.getiterator("packagereq"):
927 if require.tag.endswith("packagereq"):
928 pkgname = require.text
929 if pkgname not in pkglist:
930 pkglist.append(pkgname)
934 def is_statically_linked(binary):
936 dev_null = os.open("/dev/null", os.O_WRONLY)
937 filecmd = find_binary_path("file")
938 args = [ filecmd, binary ]
939 file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
940 output = file.communicate()[0]
942 if output.find(", statically linked, ") > 0:
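# Illustrative `file` output that satisfies the test above:
#   "/usr/bin/qemu-arm-static: ELF 32-bit LSB executable, ARM, version 1
#    (SYSV), statically linked, stripped"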
946 def setup_qemu_emulator(rootdir, arch):
947 # Mount binfmt_misc if it isn't mounted yet.
948 if not os.path.exists("/proc/sys/fs/binfmt_misc"):
949 modprobecmd = find_binary_path("modprobe")
950 subprocess.call([modprobecmd, "binfmt_misc"])
951 if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
952 mountcmd = find_binary_path("mount")
953 subprocess.call([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])
955 # qemu_emulator is a special case: we can't use find_binary_path because
956 # the qemu emulator must be a statically-linked executable.
957 qemu_emulator = "/usr/bin/qemu-arm"
958 if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
959 qemu_emulator = "/usr/bin/qemu-arm-static"
960 if not os.path.exists(qemu_emulator):
961 raise CreatorError("Please install a statically-linked qemu-arm")
962 if not os.path.exists(rootdir + "/usr/bin"):
963 makedirs(rootdir + "/usr/bin")
964 shutil.copy(qemu_emulator, rootdir + qemu_emulator)
966 # Disable SELinux; otherwise it blocks the qemu emulator from running.
967 if os.path.exists("/usr/sbin/setenforce"):
968 subprocess.call(["/usr/sbin/setenforce", "0"])
970 node = "/proc/sys/fs/binfmt_misc/arm"
971 if is_statically_linked(qemu_emulator) and os.path.exists(node):
974 # Unregister it if it is already registered but the emulator is dynamically linked.
975 if not is_statically_linked(qemu_emulator) and os.path.exists(node):
976 qemu_unregister_string = "-1\n"
977 fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
978 fd.write(qemu_unregister_string)
981 # Register the qemu emulator so the kernel can run foreign-arch executables.
982 if not os.path.exists(node):
983 qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
984 fd = open("/proc/sys/fs/binfmt_misc/register", "w")
985 fd.write(qemu_arm_string)
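# The register string follows the binfmt_misc format
# :name:type:offset:magic:mask:interpreter:flags (see the kernel's
# binfmt_misc documentation); the magic/mask above match the ELF header
# of 32-bit little-endian ARM executables.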
990 def create_release(config, destdir, name, outimages, release):
991 """ TODO: This functionality should really be in creator.py inside the
992 ImageCreator class. """
994 # Virtual machine images get an extra subdirectory; it is unnecessary, so move them up and remove it.
997 for i in range(len(outimages)):
999 if not os.path.isdir(file) and os.path.dirname(file) != destdir:
1000 thatsubdir = os.path.dirname(file)
1001 newfile = os.path.join(destdir, os.path.basename(file))
1002 shutil.move(file, newfile)
1003 outimages[i] = newfile
1005 shutil.rmtree(thatsubdir, ignore_errors = True)
1007 """ Create release directory and files """
1008 os.system("cp %s %s/%s.ks" % (config, destdir, name))
1009 # When building a release we want to make sure the .ks
1010 # file generates the same build even when --release= is not used.
1011 fd = open(config, "r")
1014 kscont = kscont.replace("@BUILD_ID@",release)
1015 fd = open("%s/%s.ks" % (destdir,name), "w")
1018 outimages.append("%s/%s.ks" % (destdir,name))
1020 # Use system + mv because of the glob (*) in the filename.
1021 os.system("mv %s/*-pkgs.txt %s/%s.packages" % (destdir, destdir, name))
1022 outimages.append("%s/%s.packages" % (destdir,name))
1024 d = os.listdir(destdir)
1026 if f.endswith(".iso"):
1027 ff = f.replace(".iso", ".img")
1028 os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
1029 outimages.append("%s/%s" %(destdir, ff))
1030 elif f.endswith(".usbimg"):
1031 ff = f.replace(".usbimg", ".img")
1032 os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
1033 outimages.append("%s/%s" %(destdir, ff))
1035 fd = open(destdir + "/MANIFEST", "w")
1036 d = os.listdir(destdir)
1040 if os.path.exists("/usr/bin/md5sum"):
1041 p = subprocess.Popen(["/usr/bin/md5sum", "-b", "%s/%s" %(destdir, f )],
1042 stdout=subprocess.PIPE)
1043 (md5sum, errorstr) = p.communicate()
1044 if p.returncode != 0:
1045 logging.warning("Can't generate md5sum for image %s/%s" %(destdir, f ))
1047 md5sum = md5sum.split(" ")[0]
1048 fd.write(md5sum+" "+f+"\n")
1050 outimages.append("%s/MANIFEST" % destdir)
1053 """ Update the file list. """
1055 for file in outimages:
1056 if os.path.exists("%s" % file):
1057 updated_list.append(file)
1061 def get_local_distro():
1062 print "Local Linux distribution:"
1063 for file in glob.glob("/etc/*-release"):
1064 fd = open(file, "r")
1068 if os.path.exists("/etc/issue"):
1069 fd = open("/etc/issue", "r")
1073 print "Local kernel version: " + os.uname()[2]
1075 def check_mic_installation(argv):
1076 creator_name = os.path.basename(argv[0])
1077 if os.path.exists("/usr/local/bin/" + creator_name) \
1078 and os.path.exists("/usr/bin/" + creator_name):
1079 raise CreatorError("Found two mic2 installations (/usr/bin and /usr/local/bin); this leads to unpredictable errors. On Debian-based distros the mic2 binary installs to a different path than the mic2 source, so please remove one of the two installations.")
1081 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
1083 def get_source_repometadata(repometadata):
1085 for repo in repometadata:
1086 if repo["name"].endswith("-source"):
1087 src_repometadata.append(repo)
1088 if src_repometadata:
1089 return src_repometadata
1092 def get_src_name(srpm):
1093 m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
1098 src_repometadata = get_source_repometadata(repometadata)
1100 if not src_repometadata:
1101 print "No source repo found"
1107 for repo in src_repometadata:
1108 cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
1109 lpkgs_path += glob.glob(cachepath)
1111 for lpkg in lpkgs_path:
1112 lpkg_name = get_src_name(os.path.basename(lpkg))
1113 lpkgs_dict[lpkg_name] = lpkg
1114 localpkgs = lpkgs_dict.keys()
1117 destdir = instroot+'/usr/src/SRPMS'
1118 if not os.path.exists(destdir):
1119 os.makedirs(destdir)
1123 srcpkg_name = get_source_name(_pkg, repometadata)
1126 srcpkgset.add(srcpkg_name)
1128 for pkg in list(srcpkgset):
1129 if pkg in localpkgs:
1131 shutil.copy(lpkgs_dict[pkg], destdir)
1132 src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
1134 src_pkg = get_package(pkg, src_repometadata, 'src')
1136 shutil.copy(src_pkg, destdir)
1137 src_pkgs.append(src_pkg)
1138 print '--------------------------------------------------'
1139 print "%d source packages retrieved from cache" % cached_count
1143 def add_optparser(arg):
1145 if not hasattr(f, "optparser"):
1150 def setup_chrootenv(chrootdir, bindmounts = None):  # move to mic/utils/misc
1151 global chroot_lockfd, chroot_lock
1152 def get_bind_mounts(chrootdir, bindmounts):
1154 if bindmounts in ("", None):
1156 mounts = bindmounts.split(";")
1157 for mount in mounts:
1160 srcdst = mount.split(":")
1161 srcdst[0] = os.path.abspath(os.path.expanduser(srcdst[0]))
1162 if len(srcdst) == 1:
1163 srcdst.append("none")
1164 if not os.path.isdir(srcdst[0]):
1166 if srcdst[0] in ("/proc", "/proc/sys/fs/binfmt_misc", "/", "/sys", "/dev", "/dev/pts", "/dev/shm", "/var/lib/dbus", "/var/run/dbus", "/var/lock"):
1167 pwarning("%s will be mounted by default." % srcdst[0])
1169 if srcdst[1] == "" or srcdst[1] == "none":
1172 srcdst[1] = os.path.abspath(os.path.expanduser(srcdst[1]))
1173 if os.path.isdir(chrootdir + "/" + srcdst[1]):
1174 pwarning("%s already exists in %s, skipping it." % (srcdst[1], chrootdir))
1176 chrootmounts.append(fs_related.BindChrootMount(srcdst[0], chrootdir, srcdst[1]))
1178 """Default bind mounts"""
1179 chrootmounts.append(fs_related.BindChrootMount("/proc", chrootdir, None))
1180 chrootmounts.append(fs_related.BindChrootMount("/proc/sys/fs/binfmt_misc", chrootdir, None))
1181 chrootmounts.append(fs_related.BindChrootMount("/sys", chrootdir, None))
1182 chrootmounts.append(fs_related.BindChrootMount("/dev", chrootdir, None))
1183 chrootmounts.append(fs_related.BindChrootMount("/dev/pts", chrootdir, None))
1184 chrootmounts.append(fs_related.BindChrootMount("/dev/shm", chrootdir, None))
1185 chrootmounts.append(fs_related.BindChrootMount("/var/lib/dbus", chrootdir, None))
1186 chrootmounts.append(fs_related.BindChrootMount("/var/run/dbus", chrootdir, None))
1187 chrootmounts.append(fs_related.BindChrootMount("/var/lock", chrootdir, None))
1188 chrootmounts.append(fs_related.BindChrootMount("/", chrootdir, "/parentroot", "ro"))
1189 for kernel in os.listdir("/lib/modules"):
1190 chrootmounts.append(fs_related.BindChrootMount("/lib/modules/" + kernel, chrootdir, None, "ro"))
1194 def bind_mount(chrootmounts):
1195 for b in chrootmounts:
1196 print "bind_mount: %s -> %s" % (b.src, b.dest)
1199 def setup_resolv(chrootdir):
1200 shutil.copyfile("/etc/resolv.conf", chrootdir + "/etc/resolv.conf")
1202 globalmounts = get_bind_mounts(chrootdir, bindmounts)
1203 bind_mount(globalmounts)
1204 setup_resolv(chrootdir)
1206 dstmtab = chrootdir + mtab
1207 if not os.path.islink(dstmtab):
1208 shutil.copyfile(mtab, dstmtab)
1209 chroot_lock = os.path.join(chrootdir, ".chroot.lock")
1210 chroot_lockfd = open(chroot_lock, "w")
1213 def cleanup_chrootenv(chrootdir, bindmounts = None, globalmounts = []):
1214 global chroot_lockfd, chroot_lock
1215 def bind_unmount(chrootmounts):
1216 chrootmounts.reverse()
1217 for b in chrootmounts:
1218 print "bind_unmount: %s -> %s" % (b.src, b.dest)
1221 def cleanup_resolv(chrootdir):
1222 fd = open(chrootdir + "/etc/resolv.conf", "w")
1226 def kill_processes(chrootdir):
1227 for file in glob.glob("/proc/*/root"):
1229 if os.readlink(file) == chrootdir:
1230 pid = int(file.split("/")[2])
1235 def cleanup_mountdir(chrootdir, bindmounts):
1236 if bindmounts in ("", None):
1239 mounts = bindmounts.split(";")
1240 for mount in mounts:
1243 srcdst = mount.split(":")
1244 if len(srcdst) == 1:
1245 srcdst.append("none")
1246 if srcdst[1] == "" or srcdst[1] == "none":
1247 srcdst[1] = srcdst[0]
1248 srcdst[1] = os.path.abspath(os.path.expanduser(srcdst[1]))
1249 tmpdir = chrootdir + "/" + srcdst[1]
1250 if os.path.isdir(tmpdir):
1251 if len(os.listdir(tmpdir)) == 0:
1252 shutil.rmtree(tmpdir, ignore_errors = True)
1254 print "Warning: dir %s isn't empty." % tmpdir
1256 chroot_lockfd.close()
1257 bind_unmount(globalmounts)
1258 if not fs_related.my_fuser(chroot_lock):
1259 tmpdir = chrootdir + "/parentroot"
1260 if len(os.listdir(tmpdir)) == 0:
1261 shutil.rmtree(tmpdir, ignore_errors = True)
1262 cleanup_resolv(chrootdir)
1263 if os.path.exists(chrootdir + "/etc/mtab"):
1264 os.unlink(chrootdir + "/etc/mtab")
1265 kill_processes(chrootdir)
1266 cleanup_mountdir(chrootdir, bindmounts)
1268 def chroot(chrootdir, bindmounts = None, execute = "/bin/bash"):
1270 os.chroot(chrootdir)
1273 dev_null = os.open("/dev/null", os.O_WRONLY)
1274 files_to_check = ["/bin/bash", "/sbin/init"]
1276 architecture_found = False
1278 """ Register statically-linked qemu-arm if it is an ARM fs """
1279 qemu_emulator = None
1281 for ftc in files_to_check:
1282 ftc = "%s/%s" % (chrootdir,ftc)
1284 # The return code of 'file' is "almost always" 0 according to its man
1285 # page, so we need to check that the file exists first.
1286 if not os.path.exists(ftc):
1289 filecmd = find_binary_path("file")
1290 initp1 = subprocess.Popen([filecmd, ftc], stdout=subprocess.PIPE, stderr=dev_null)
1291 fileOutput = initp1.communicate()[0].strip().split("\n")
1293 for i in range(len(fileOutput)):
1294 if fileOutput[i].find("ARM") > 0:
1295 qemu_emulator = setup_qemu_emulator(chrootdir, "arm")
1296 architecture_found = True
1298 if fileOutput[i].find("Intel") > 0:
1299 architecture_found = True
1302 if architecture_found:
1306 if not architecture_found:
1307 raise errors.CreatorError("Failed to determine the architecture from any of the following files in the chroot: %s" % files_to_check)
1310 print "Launching shell. Exit to continue."
1311 print "----------------------------------"
1312 globalmounts = setup_chrootenv(chrootdir, bindmounts)
1313 args = shlex.split(execute)
1314 subprocess.call(args, preexec_fn = mychroot)
1315 except OSError, (err, msg):
1316 raise errors.CreatorError("Failed to chroot: %s" % msg)
1318 cleanup_chrootenv(chrootdir, bindmounts, globalmounts)
1320 os.unlink(chrootdir + qemu_emulator)