2 # misc.py : miscellaneous utilities
4 # Copyright 2010, Intel Inc.
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; version 2 of the License.
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU Library General Public License for more details.
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
28 import xml.dom.minidom
35 import sqlite3 as sqlite
41 from xml.etree import cElementTree
44 xmlparse = cElementTree.parse
47 from fs_related import *
# Force the C locale so the output of external tools ("file", "du", ...)
# is parsed predictably, and make stdout replace unencodable characters
# instead of raising.  (Intervening lines are elided from this listing.)
52 locale.setlocale(locale.LC_ALL,'')
54 os.environ['LC_ALL'] = 'C'
55 locale.setlocale(locale.LC_ALL,'C')
56 sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
57 sys.stdout.errors = 'replace'
# Return the file-name extension of *path*: the word characters after the
# final dot (e.g. "img.vmdk" -> "vmdk").
59 def get_extension_name(path):
60 match = re.search("(?<=\.)\w+$", path)
# NOTE(review): the handling of `match` (success/failure return) is elided
# from this listing.
# Classify the image at *path* ("raw", "vmdk", "vdi", "livecd", "liveusb", ...),
# first by file extension, then by content: a VirtualBox header sniff, and
# finally the output of the external "file" command.  Several lines
# (directory handling, fd open/close, some return statements) are elided
# from this listing.
66 def get_image_type(path):
67 if os.path.isdir(path):
71 maptab = {"raw":"raw", "vmdk":"vmdk", "vdi":"vdi", "iso":"livecd", "usbimg":"liveusb"}
72 extension = get_extension_name(path)
73 if extension in ("raw", "vmdk", "vdi", "iso", "usbimg"):
74 return maptab[extension]
# Content sniffing: the first 1 KiB identifies VirtualBox VDI images.
77 file_header = fd.read(1024)
79 vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
80 if file_header[0:len(vdi_flag)] == vdi_flag:
# Fall back to `file(1)` and pattern-match its description line.
# NOTE(review): `dev_null` does not appear to be closed, and `file`
# shadows the builtin — confirm against the full source.
83 dev_null = os.open("/dev/null", os.O_WRONLY)
84 filecmd = find_binary_path("file")
85 args = [ filecmd, path ]
86 file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
87 output = file.communicate()[0]
89 isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
90 usbimgptn = re.compile(r".*x86 boot sector.*active.*")
91 rawptn = re.compile(r".*x86 boot sector.*")
92 vmdkptn = re.compile(r".*VMware. disk image.*")
93 ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
# Order matters: "usbimg" (active boot sector) must be tested before the
# more general "raw" boot-sector pattern.
94 if isoptn.match(output):
96 elif usbimgptn.match(output):
97 return maptab["usbimg"]
98 elif rawptn.match(output):
100 elif vmdkptn.match(output):
101 return maptab["vmdk"]
102 elif ext3fsimgptn.match(output):
107 def get_file_size(file):
108 """Return size in MB unit"""
# Runs `du` twice: once with -b (apparent size) and once without
# (disk usage); the comparison/return of size1 vs size2 is elided
# from this listing.
# NOTE(review): `dev_null` does not appear to be closed — confirm.
109 du = find_binary_path("du")
110 dev_null = os.open("/dev/null", os.O_WRONLY)
111 duProc = subprocess.Popen([du, "-s", "-b", "-B", "1M", file],
112 stdout=subprocess.PIPE, stderr=dev_null)
113 duOutput = duProc.communicate()[0]
114 if duProc.returncode:
115 raise CreatorError("Failed to run %s" % du)
117 size1 = int(duOutput.split()[0])
118 duProc = subprocess.Popen([du, "-s", "-B", "1M", file],
119 stdout=subprocess.PIPE, stderr=dev_null)
120 duOutput = duProc.communicate()[0]
121 if duProc.returncode:
122 raise CreatorError("Failed to run %s" % du)
124 size2 = int(duOutput.split()[0])
def get_filesystem_avail(fs):
    """Return the free space, in bytes, available to unprivileged users
    on the filesystem that contains *fs*."""
    stat = os.statvfs(fs)
    return stat.f_bsize * stat.f_bavail
# Convert a disk image between formats: vmdk sources go through qemu-img,
# vdi sources through VBoxManage.  The dstfmt validation and the vmdk
# branch header are partially elided from this listing.
135 def convert_image(srcimg, srcfmt, dstimg, dstfmt):
138 raise CreatorError("Invalid destination image format: %s" % dstfmt)
139 logging.debug("converting %s image to %s" % (srcimg, dstimg))
141 path = find_binary_path("qemu-img")
142 argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
143 elif srcfmt == "vdi":
144 path = find_binary_path("VBoxManage")
145 argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
147 raise CreatorError("Invalid soure image format: %s" % srcfmt)
# Run the chosen converter; a non-zero exit is turned into CreatorError.
149 rc = subprocess.call(argv)
151 logging.debug("convert successful")
153 raise CreatorError("Unable to convert disk to %s" % dstfmt)
# Recursively copy every top-level entry of *src* into *dst* with `cp -af`
# (preserving attributes), then drop pseudo-files that must not survive a
# chroot copy.  The loop header over `dirnames` is elided from this listing.
155 def myxcopytree(src, dst):
# NOTE(review): `dev_null` does not appear to be closed — confirm.
156 dev_null = os.open("/dev/null", os.O_WRONLY)
157 dirnames = os.listdir(src)
158 copycmd = find_binary_path("cp")
160 args = [ copycmd, "-af", src + "/" + dir, dst ]
161 subprocess.call(args, stdout=dev_null, stderr=dev_null)
# Remove device/state files that only make sense on the live system.
163 ignores = ["dev/fd", "dev/stdin", "dev/stdout", "dev/stderr", "etc/mtab"]
164 for exclude in ignores:
165 if os.path.exists(dst + "/" + exclude):
166 os.unlink(dst + "/" + exclude)
168 def uncompress_squashfs(squashfsimg, outdir):
169 """Uncompress file system from squshfs image"""
# Shells out to unsquashfs; the rc check (elided here) raises
# SquashfsError on failure.
170 unsquashfs = find_binary_path("unsquashfs")
171 args = [ unsquashfs, "-d", outdir, squashfsimg ]
172 rc = subprocess.call(args)
174 raise SquashfsError("Failed to uncompress %s." % squashfsimg)
# Create a unique temporary directory under *dir* and return its path.
# NOTE(review): parameter `dir` shadows the builtin; one line (orig 177)
# is elided here — it may prepare *dir* before the mkdtemp call.
176 def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
178 return tempfile.mkdtemp(dir = dir, prefix = prefix)
# Heuristic: does *rootdir* contain a bootable MeeGo/Moblin root filesystem?
# Checks release file, init scripts and an installed kernel.  The return
# statements are elided from this listing.
180 def ismeego(rootdir):
182 if (os.path.exists(rootdir + "/etc/moblin-release") \
183 or os.path.exists(rootdir + "/etc/meego-release")) \
184 and os.path.exists(rootdir + "/etc/inittab") \
185 and os.path.exists(rootdir + "/etc/rc.sysinit") \
186 and glob.glob(rootdir + "/boot/vmlinuz-*"):
# Heuristic: is *rootdir* a MeeGo bootstrap environment (release file plus
# python and the mic image creator installed)?  Return statements are
# elided from this listing.
192 def is_meego_bootstrap(rootdir):
194 if (os.path.exists(rootdir + "/etc/moblin-release") \
195 or os.path.exists(rootdir + "/etc/meego-release")) \
196 and os.path.exists(rootdir + "/usr/bin/python") \
197 and os.path.exists(rootdir + "/usr/bin/mic-image-creator"):
# Parsed no_proxy entries (match-type dicts) filled in by set_noproxy_list().
205 _my_noproxy_list = []
# Export the module-level proxy settings back into os.environ
# (scheme_proxy variables plus no_proxy).  Guard lines are elided
# from this listing.
207 def set_proxy_environ():
208 global _my_noproxy, _my_proxies
211 for key in _my_proxies.keys():
212 os.environ[key + "_proxy"] = _my_proxies[key]
215 os.environ["no_proxy"] = _my_noproxy
def unset_proxy_environ():
    """Remove every proxy-related variable from os.environ.

    urllib2 cannot handle these environment variables correctly, so they
    are stripped (both lower- and upper-case forms) before any download.
    """
    # dict.has_key is deprecated (removed in Python 3); pop(name, None)
    # also avoids the check-then-delete double lookup.
    for name in ("http_proxy", "https_proxy", "ftp_proxy",
                 "all_proxy", "no_proxy"):
        os.environ.pop(name, None)
        os.environ.pop(name.upper(), None)
239 def _set_proxies(proxy = None, no_proxy = None):
240 """Return a dictionary of scheme -> proxy server URL mappings."""
# Populates the module-level _my_proxies/_my_noproxy from the explicit
# arguments, falling back to the environment.  Several lines (proxies
# list init, no_proxy assignment) are elided from this listing.
241 global _my_noproxy, _my_proxies
246 proxies.append(("http_proxy", proxy))
248 proxies.append(("no_proxy", no_proxy))
250 """Get proxy settings from environment variables if not provided"""
251 if not proxy and not no_proxy:
252 proxies = os.environ.items()
254 """ Remove proxy env variables, urllib2 can't handle them correctly """
255 unset_proxy_environ()
257 for name, value in proxies:
# Keep entries like "http_proxy" keyed by scheme ("http"); "no_proxy"
# is excluded here and handled separately.
259 if value and name[-6:] == '_proxy':
260 if name[0:2] != "no":
261 _my_proxies[name[:-6]] = value
# Fragments of the dotted-quad helpers (their `def` lines are elided from
# this listing): ip_to_int builds an integer from "a.b.c.d", int_to_ip
# rebuilds the string, and isip tests whether a host looks like an IP.
268 for dec in ip.split("."):
269 ipint |= int(dec) << shift
279 ipaddr = ".%d%s" % (dec, ipaddr)
284 if host.replace(".", "").isdigit():
# Parse the comma-separated _my_noproxy string into _my_noproxy_list
# entries: match==0 exact host, match==1 domain suffix, match==2 IP/mask.
# Several lines (guards, netmask parsing branches) are elided from this
# listing.
288 def set_noproxy_list():
289 global _my_noproxy, _my_noproxy_list
290 _my_noproxy_list = []
293 for item in _my_noproxy.split(","):
297 if item[0] != '.' and item.find("/") == -1:
298 """ Need to match it """
299 _my_noproxy_list.append({"match":0,"needle":item})
301 """ Need to match at tail """
302 _my_noproxy_list.append({"match":1,"needle":item})
303 elif item.find("/") > 3:
304 """ IP/MASK, need to match at head """
305 needle = item[0:item.find("/")].strip()
306 ip = ip_to_int(needle)
308 mask = item[item.find("/")+1:].strip()
# The mask may be a prefix length ("/24") or dotted quad ("/255.255.255.0").
312 netmask = ~((1<<(32-netmask)) - 1)
317 for dec in mask.split("."):
318 netmask |= int(dec) << shift
321 _my_noproxy_list.append({"match":2,"needle":ip,"netmask":netmask})
# Fragment of isnoproxy(url) — the `def` line is elided from this listing.
# Strips credentials and port from the URL's host, then tests it against
# every parsed no_proxy entry (exact / suffix / subnet).
324 (scheme, host, path, parm, query, frag) = urlparse.urlparse(url)
326 user_pass, host = host.split('@', 1)
328 host, port = host.split(':', 1)
329 hostisip = isip(host)
330 for item in _my_noproxy_list:
# Name-based rules (match 0/1) cannot apply to a numeric host.
331 if hostisip and item["match"] <= 1:
333 if item["match"] == 2 and hostisip:
334 if (ip_to_int(host) & item["netmask"]) == item["needle"]:
336 if item["match"] == 0:
337 if host == item["needle"]:
339 if item["match"] == 1:
340 if host.rfind(item["needle"]) > 0:
# Public entry point: record proxy settings; the follow-up call that
# rebuilds the no_proxy list is elided from this listing.
344 def set_proxies(proxy = None, no_proxy = None):
345 _set_proxies(proxy, no_proxy)
# Fragment of get_proxy(url) — the `def` line is elided from this listing.
# Returns the proxy for the URL's scheme, falling back to the http proxy;
# file:// URLs and no_proxy matches get none.
# NOTE(review): dict.has_key is Python-2-only.
349 if url[0:4] == "file" or isnoproxy(url):
351 type = url[0:url.index(":")]
353 if _my_proxies.has_key(type):
354 proxy = _my_proxies[type]
355 elif _my_proxies.has_key("http"):
356 proxy = _my_proxies["http"]
# Rewrite the baseurl inside *repostr* when the site config has an enabled
# section whose "equalto" matches this repo's name or baseurl.  The field
# extraction assignments and the return are elided from this listing.
361 def remap_repostr(repostr, siteconf):
362 items = repostr.split(",")
366 subitems = item.split(":")
367 if subitems[0] == "name":
369 if subitems[0] == "baseurl":
374 for section in siteconf._sections:
375 if section != "main":
# NOTE(review): reads the private _sections attribute of ConfigParser —
# relies on implementation detail.
376 if not siteconf.has_option(section, "enabled") or siteconf.get(section, "enabled") == "0":
378 if siteconf.has_option(section, "equalto"):
379 equalto = siteconf.get(section, "equalto")
380 if (name and equalto == name) or (baseurl and equalto == baseurl):
381 remap_baseurl = siteconf.get(section, "baseurl")
382 repostr = repostr.replace(baseurl, remap_baseurl)
# Derive a stable temporary repo name: the md5 hex digest of the baseurl.
# The return statement is elided from this listing.
388 def get_temp_reponame(baseurl):
389 md5obj = hashlib.md5(baseurl)
390 tmpreponame = "%s" % md5obj.hexdigest()
# Translate a comma-separated "key:value" repo description into a kickstart
# "repo" option string ("--baseurl=... --name=..." etc.).  Many lines
# (loop headers, url assignments, returns) are elided from this listing.
393 def get_repostr(repo, siteconf = None):
395 repo = remap_repostr(repo, siteconf)
396 keys = ("baseurl", "mirrorlist", "name", "cost", "includepkgs", "excludepkgs", "proxy", "save", "proxyuser", "proxypasswd", "debuginfo", "source", "gpgkey")
398 items = repo.split(",")
# The first item may be a bare URL, an explicit "baseurl:...", or start
# directly with a scheme.
400 subitems = items[0].split(":")
401 if len(subitems) == 1:
403 repostr += " --baseurl=%s" % url
404 elif subitems[0] == "baseurl":
406 repostr += " --baseurl=%s" % url
407 elif subitems[0] in ("http", "ftp", "https", "ftps", "file"):
409 repostr += " --baseurl=%s" % url
411 raise ValueError("Invalid repo string")
# Sanity-check the URL: must have a scheme from the allowed set and a path.
412 if url.find("://") == -1 \
413 or url[0:url.index("://")] not in ("http", "ftp", "https", "ftps", "file") \
414 or url.find("/", url.index("://")+3) == -1:
415 raise ValueError("Invalid repo string")
416 if repo.find("baseurl:") == -1 and repo.find("mirrorlist:") == -1:
417 raise ValueError("Invalid repo string")
# Remaining items: boolean flags pass through bare, list-valued keys
# convert ";" separators to ",".
423 subitems = item.split(":")
424 if subitems[0] in keys:
425 if subitems[0] in ("baseurl", "mirrorlist"):
426 url = item[len(subitems[0])+1:]
427 if subitems[0] in ("save", "debuginfo", "source"):
428 repostr += " --%s" % subitems[0]
429 elif subitems[0] in ("includepkgs", "excludepkgs"):
430 repostr += " --%s=%s" % (subitems[0], item[len(subitems[0])+1:].replace(";", ","))
432 repostr += " --%s=%s" % (subitems[0], item[len(subitems[0])+1:])
434 raise ValueError("Invalid repo string")
# Re-validate the final URL and fill defaults for --proxy and --name.
435 if url.find("://") != -1 \
436 and url[0:url.index("://")] in ("http", "ftp", "https", "ftps", "file") \
437 and url.find("/", url.index("://")+3) != -1:
438 if repostr.find("--proxy=") == -1:
439 proxy = get_proxy(url)
441 repostr += " --proxy=%s" % proxy
443 raise ValueError("Invalid repo string")
445 if repostr.find("--name=") == -1:
446 repostr += " --name=%s" % get_temp_reponame(url)
# Site configuration search locations: system-wide file, then a per-user
# override in the home directory.
450 DEFAULT_SITECONF_GLOBAL="/etc/mic2/mic2.conf"
451 DEFAULT_SITECONF_USER="~/.mic2.conf"
# Load the site configuration: the global file, then the user file, then an
# explicit *siteconf* path (each later read overrides earlier values).
# Returns the parser, or (per the elided branch) presumably None when no
# sections were loaded — confirm against the full source.
453 def read_siteconf(siteconf = None):
454 from ConfigParser import SafeConfigParser
456 my_siteconf_parser = SafeConfigParser()
458 global_siteconf = DEFAULT_SITECONF_GLOBAL
459 if os.path.isfile(global_siteconf):
460 my_siteconf_parser.read(global_siteconf)
462 local_siteconf = os.path.expanduser(DEFAULT_SITECONF_USER)
463 if os.path.isfile(local_siteconf):
464 my_siteconf_parser.read(local_siteconf)
466 my_siteconf_parser.read(siteconf)
468 if not my_siteconf_parser.sections():
471 return my_siteconf_parser
# Render the parsed site configuration back into INI text, one
# "[section]" header followed by "option=value" lines per section.
# The accumulator init and the output/return of the built string are
# elided from this listing.
473 def output_siteconf(siteconf):
478 for section in siteconf.sections():
479 output += "[%s]\n" % section
480 for option in siteconf.options(section):
481 output += "%s=%s\n" % (option, siteconf.get(section, option))
# Convert every repo command in a parsed kickstart into the comma-separated
# "key:value" repo-string format used elsewhere in this module.  The
# per-repo accumulator init and a few flag lines are elided from this
# listing.
487 def get_repostrs_from_ks(ks):
489 for repodata in ks.handler.repo.repoList:
491 if hasattr(repodata, "name") and repodata.name:
492 repostr += ",name:" + repodata.name
493 if hasattr(repodata, "baseurl") and repodata.baseurl:
494 repostr += ",baseurl:" + repodata.baseurl
495 if hasattr(repodata, "mirrorlist") and repodata.mirrorlist:
496 repostr += ",mirrorlist:" + repodata.mirrorlist
497 if hasattr(repodata, "includepkgs") and repodata.includepkgs:
498 repostr += ",includepkgs:" + ";".join(repodata.includepkgs)
499 if hasattr(repodata, "excludepkgs") and repodata.excludepkgs:
500 repostr += ",excludepkgs:" + ";".join(repodata.excludepkgs)
501 if hasattr(repodata, "cost") and repodata.cost:
502 repostr += ",cost:%d" % repodata.cost
503 if hasattr(repodata, "save") and repodata.save:
505 if hasattr(repodata, "proxy") and repodata.proxy:
506 repostr += ",proxy:" + repodata.proxy
507 if hasattr(repodata, "proxyuser") and repodata.proxy_username:
508 repostr += ",proxyuser:" + repodata.proxy_username
509 if hasattr(repodata, "proxypasswd") and repodata.proxy_password:
510 repostr += ",proxypasswd:" + repodata.proxy_password
# NOTE(review): plain "=" below discards every option accumulated above —
# likely should be "+=" ; confirm against upstream.
511 if repostr.find("name:") == -1:
512 repostr = ",name:%s" % get_temp_reponame(repodata.baseurl)
513 if hasattr(repodata, "debuginfo") and repodata.debuginfo:
514 repostr += ",debuginfo:"
515 if hasattr(repodata, "source") and repodata.source:
516 repostr += ",source:"
517 if hasattr(repodata, "gpgkey") and repodata.gpgkey:
518 repostr += ",gpgkey:" + repodata.gpgkey
# repostr[1:] drops the leading comma.
519 kickstart_repos.append(repostr[1:])
520 return kickstart_repos
# Build repo strings from every enabled, non-remapping section of the site
# configuration.  The accumulator init, the save flag line and the return
# are elided from this listing.
522 def get_repostrs_from_siteconf(siteconf):
527 for section in siteconf._sections:
528 if section != "main":
530 if siteconf.has_option(section, "enabled") \
531 and siteconf.get(section, "enabled") == "1" \
532 and (not siteconf.has_option(section, "equalto") or not siteconf.get(section, "equalto")):
533 if siteconf.has_option(section, "name") and siteconf.get(section, "name"):
534 repostr += ",name:%s" % siteconf.get(section, "name")
535 if siteconf.has_option(section, "baseurl") and siteconf.get(section, "baseurl"):
536 repostr += ",baseurl:%s" % siteconf.get(section, "baseurl")
537 if siteconf.has_option(section, "mirrorlist") and siteconf.get(section, "mirrorlist"):
538 repostr += ",mirrorlist:%s" % siteconf.get(section, "mirrorlist")
539 if siteconf.has_option(section, "includepkgs") and siteconf.get(section, "includepkgs"):
540 repostr += ",includepkgs:%s" % siteconf.get(section, "includepkgs").replace(",", ";")
541 if siteconf.has_option(section, "excludepkgs") and siteconf.get(section, "excludepkgs"):
542 repostr += ",excludepkgs:%s" % siteconf.get(section, "excludepkgs").replace(",", ";")
543 if siteconf.has_option(section, "cost") and siteconf.get(section, "cost"):
544 repostr += ",cost:%s" % siteconf.get(section, "cost")
545 if siteconf.has_option(section, "save") and siteconf.get(section, "save"):
547 if siteconf.has_option(section, "proxy") and siteconf.get(section, "proxy"):
548 repostr += ",proxy:%s" % siteconf.get(section, "proxy")
549 if siteconf.has_option(section, "proxy_username") and siteconf.get(section, "proxy_username"):
550 repostr += ",proxyuser:%s" % siteconf.get(section, "proxy_username")
551 if siteconf.has_option(section, "proxy_password") and siteconf.get(section, "proxy_password"):
552 repostr += ",proxypasswd:%s" % siteconf.get(section, "proxy_password")
# NOTE(review): two issues below — "=" discards the accumulated options
# (cf. the "+=" pattern above), and get_temp_reponame() is called without
# its required baseurl argument; confirm against upstream.
554 if repostr.find("name:") == -1:
555 repostr = ",name:%s" % get_temp_reponame()
556 site_repos.append(repostr[1:])
# Download *url* to *filename* and, when the file is gzip- or bzip2-
# compressed, decompress it in place; returns the path of the final
# (uncompressed) file.  The suffix assignments and return are elided
# from this listing.
559 def get_uncompressed_data_from_url(url, filename, proxies):
560 filename = myurlgrab(url, filename, proxies)
562 if filename.endswith(".gz"):
564 gunzip = find_binary_path('gunzip')
565 subprocess.call([gunzip, "-f", filename])
566 elif filename.endswith(".bz2"):
568 bunzip2 = find_binary_path('bunzip2')
569 subprocess.call([bunzip2, "-f", filename])
571 filename = filename.replace(suffix, "")
def get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename):
    """Fetch one metadata file from a repository.

    Downloads baseurl/filename into the per-repo cache directory and
    returns the local path of the (uncompressed) copy.
    """
    # str() coerces possible unicode parts for the py2 url/path handling.
    remote = str(baseurl + "/" + filename)
    local = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    return get_uncompressed_data_from_url(remote, local, proxies)
# For every repo string: fetch repodata/repomd.xml, then the patterns,
# comps (group/group_gz) and primary (xml or sqlite) metadata it
# references, plus the repo signing key.  Returns a list of per-repo
# metadata dicts.  Many lines (per-repo variable resets, loop headers,
# try/except bodies) are elided from this listing.
579 def get_metadata_from_repos(repostrs, cachedir):
# NOTE(review): the missing "raise" before CreatorError makes this line
# a no-op as shown — confirm against upstream.
581 CreatorError("No cache dir defined.")
583 my_repo_metadata = []
584 for repostr in repostrs:
588 items = repostr.split(",")
590 subitems = item.split(":")
591 if subitems[0] == "name":
592 reponame = subitems[1]
593 if subitems[0] == "baseurl":
595 if subitems[0] == "proxy":
597 if subitems[0] in ("http", "https", "ftp", "ftps", "file"):
600 proxy = get_proxy(baseurl)
# urlgrabber-style proxies dict: scheme -> proxy URL.
603 proxies = {str(proxy.split(":")[0]):str(proxy)}
604 makedirs(cachedir + "/" + reponame)
605 url = str(baseurl + "/repodata/repomd.xml")
606 filename = str("%s/%s/repomd.xml" % (cachedir, reponame))
607 repomd = myurlgrab(url, filename, proxies)
609 root = xmlparse(repomd)
611 raise CreatorError("repomd.xml syntax error.")
# Extract the XML namespace prefix ("{...}") from the root tag.
613 ns = root.getroot().tag
614 ns = ns[0:ns.rindex("}")+1]
617 for elm in root.getiterator("%sdata" % ns):
618 if elm.attrib["type"] == "patterns":
619 patterns = elm.find("%slocation" % ns).attrib['href']
623 for elm in root.getiterator("%sdata" % ns):
624 if elm.attrib["type"] == "group_gz":
625 comps = elm.find("%slocation" % ns).attrib['href']
628 for elm in root.getiterator("%sdata" % ns):
629 if elm.attrib["type"] == "group":
630 comps = elm.find("%slocation" % ns).attrib['href']
# Prefer the sqlite primary db over the xml primary when both exist.
634 for elm in root.getiterator("%sdata" % ns):
635 if elm.attrib["type"] == "primary_db":
636 primary_type=".sqlite"
640 for elm in root.getiterator("%sdata" % ns):
641 if elm.attrib["type"] == "primary":
648 primary = elm.find("%slocation" % ns).attrib['href']
649 primary = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, primary)
652 patterns = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, patterns)
655 comps = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, comps)
# The signing key is optional; failures only warn.
659 repokey = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, "repodata/repomd.xml.key")
662 print "Warning: can't get %s/%s" % (baseurl, "repodata/repomd.xml.key")
664 my_repo_metadata.append({"name":reponame, "baseurl":baseurl, "repomd":repomd, "primary":primary, "cachedir":cachedir, "proxies":proxies, "patterns":patterns, "comps":comps, "repokey":repokey})
665 return my_repo_metadata
# Collect the set of package architectures (excluding noarch/src) found in
# each repo's primary metadata, reading either the xml or sqlite form.
# The archlist init and the return are elided from this listing.
667 def get_arch(repometadata):
669 for repo in repometadata:
670 if repo["primary"].endswith(".xml"):
671 root = xmlparse(repo["primary"])
672 ns = root.getroot().tag
673 ns = ns[0:ns.rindex("}")+1]
674 for elm in root.getiterator("%spackage" % ns):
675 if elm.find("%sarch" % ns).text not in ("noarch", "src"):
676 arch = elm.find("%sarch" % ns).text
677 if arch not in archlist:
678 archlist.append(arch)
679 elif repo["primary"].endswith(".sqlite"):
# NOTE(review): the sqlite connection is not closed here — confirm.
680 con = sqlite.connect(repo["primary"])
681 for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
682 if row[0] not in archlist:
683 archlist.append(row[0])
# Find the newest version of *pkg* across all repos' primary metadata and
# download its rpm into the repo cache; returns the local rpm path.
# Version-comparison lines, target_repo bookkeeping and the arch=="src"
# branch header are elided from this listing.
689 def get_package(pkg, repometadata, arch = None):
692 for repo in repometadata:
693 if repo["primary"].endswith(".xml"):
694 root = xmlparse(repo["primary"])
695 ns = root.getroot().tag
696 ns = ns[0:ns.rindex("}")+1]
697 for elm in root.getiterator("%spackage" % ns):
698 if elm.find("%sname" % ns).text == pkg:
699 if elm.find("%sarch" % ns).text != "src":
700 version = elm.find("%sversion" % ns)
701 tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
704 location = elm.find("%slocation" % ns)
705 pkgpath = "%s" % location.attrib['href']
708 if repo["primary"].endswith(".sqlite"):
709 con = sqlite.connect(repo["primary"])
# NOTE(review): SQL is built by %-interpolating the package name —
# use a parameterized query ("... where name = ?") instead.
711 for row in con.execute("select version, release,location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
712 tmpver = "%s-%s" % (row[0], row[1])
714 pkgpath = "%s" % row[2]
718 for row in con.execute("select version, release,location_href from packages where name = \"%s\"" % pkg):
719 tmpver = "%s-%s" % (row[0], row[1])
721 pkgpath = "%s" % row[2]
726 makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
727 url = str(target_repo["baseurl"] + "/" + pkgpath)
728 filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
729 pkg = myurlgrab(url, filename, target_repo["proxies"])
# Map a binary rpm filename to its source package name by looking up the
# sourcerpm field in the repos' primary metadata.  The regex-group returns
# of the nested helpers and several bookkeeping lines are elided from this
# listing.
734 def get_source_name(pkg, repometadata):
# Strip "name-version-release.arch.rpm" down to the bare package name.
736 def get_bin_name(pkg):
737 m = re.match("(.*)-(.*)-(.*)\.(.*)\.rpm", pkg)
# Strip "name-version-release.src.rpm" down to the source package name.
742 def get_src_name(srpm):
743 m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
751 pkg_name = get_bin_name(pkg)
755 for repo in repometadata:
756 if repo["primary"].endswith(".xml"):
757 root = xmlparse(repo["primary"])
758 ns = root.getroot().tag
759 ns = ns[0:ns.rindex("}")+1]
760 for elm in root.getiterator("%spackage" % ns):
761 if elm.find("%sname" % ns).text == pkg_name:
762 if elm.find("%sarch" % ns).text != "src":
763 version = elm.find("%sversion" % ns)
764 tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
# The <format> child carries its own namespace; extract it from the
# first child's tag before looking up <sourcerpm>.
767 fmt = elm.find("%sformat" % ns)
769 fns = fmt.getchildren()[0].tag
770 fns = fns[0:fns.rindex("}")+1]
771 pkgpath = fmt.find("%ssourcerpm" % fns).text
775 if repo["primary"].endswith(".sqlite"):
776 con = sqlite.connect(repo["primary"])
# NOTE(review): string-interpolated SQL — prefer a parameterized query.
777 for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
778 tmpver = "%s-%s" % (row[0], row[1])
780 pkgpath = "%s" % row[2]
785 return get_src_name(pkgpath)
# Determine the distro release number by downloading the <distro>-release
# package, unpacking it with rpm2cpio|cpio into a temp dir and reading the
# third word of /etc/<distro>-release.  The tmpdir creation/chdir and file
# read lines are elided from this listing.
789 def get_release_no(repometadata, distro="meego"):
790 cpio = find_binary_path("cpio")
791 rpm2cpio = find_binary_path("rpm2cpio")
792 release_pkg = get_package("%s-release" % distro, repometadata)
797 p1 = subprocess.Popen([rpm2cpio, release_pkg], stdout = subprocess.PIPE)
798 p2 = subprocess.Popen([cpio, "-idv"], stdin = p1.stdout, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
800 f = open("%s/etc/%s-release" % (tmpdir, distro), "r")
804 shutil.rmtree(tmpdir, ignore_errors = True)
# e.g. "MeeGo release 1.0 ..." -> "1.0".
805 return content.split(" ")[2]
# Collect the kickstart files advertised by each repo's "image-config"
# repomd entry: download the (possibly compressed) image-config.xml, then
# fetch every referenced kickstart path.  The kickstarts list init,
# try/except scaffolding and suffix handling are elided from this listing.
809 def get_kickstarts_from_repos(repometadata):
811 for repo in repometadata:
813 root = xmlparse(repo["repomd"])
815 raise CreatorError("repomd.xml syntax error.")
817 ns = root.getroot().tag
818 ns = ns[0:ns.rindex("}")+1]
820 for elm in root.getiterator("%sdata" % ns):
821 if elm.attrib["type"] == "image-config":
# Repos without an image-config entry are skipped.
824 if elm.attrib["type"] != "image-config":
827 location = elm.find("%slocation" % ns)
828 image_config = str(repo["baseurl"] + "/" + location.attrib["href"])
829 filename = str("%s/%s/image-config.xml%s" % (repo["cachedir"], repo["name"], suffix))
831 image_config = get_uncompressed_data_from_url(image_config,filename,repo["proxies"])
834 root = xmlparse(image_config)
836 raise CreatorError("image-config.xml syntax error.")
838 for elm in root.getiterator("config"):
839 path = elm.find("path").text
840 path = path.replace("images-config", "image-config")
841 description = elm.find("description").text
842 makedirs(os.path.dirname("%s/%s/%s" % (repo["cachedir"], repo["name"], path)))
# Relative paths are resolved against the repo baseurl; absolute
# http(s) URLs are presumably handled in the elided else branch.
844 if "http" not in path:
845 url = str(repo["baseurl"] + "/" + path)
846 filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], path))
847 path = myurlgrab(url, filename, repo["proxies"])
848 kickstarts.append({"filename":path,"description":description})
# Interactively prompt the user to pick one kickstart file from *ksfiles*;
# "q" quits.  The enumeration loop, input validation/retry and the int()
# conversion of `choice` are elided from this listing.
851 def select_ks(ksfiles):
852 print "Available kickstart files:"
856 print "\t%d. %s (%s)" % (i, ks["description"], os.path.basename(ks["filename"]))
858 choice = raw_input("Please input your choice and press ENTER. [1..%d] ? " % i)
859 if choice.lower() == "q":
863 if choice >= 1 and choice <= i:
866 return ksfiles[choice-1]["filename"]
# Return the package names required by the pattern named (or summarized as)
# *group* in a patterns XML file.  The pkglist init, the per-element
# namespace extraction source line and the return are elided from this
# listing.
869 def get_pkglist_in_patterns(group, patterns):
873 root = xmlparse(patterns)
875 raise SyntaxError("%s syntax error." % patterns)
877 for elm in list(root.getroot()):
879 ns = ns[0:ns.rindex("}")+1]
880 name = elm.find("%sname" % ns)
881 summary = elm.find("%ssummary" % ns)
# A pattern matches when either its name or its summary equals *group*.
882 if name.text == group or summary.text == group:
890 for requires in list(elm):
891 if requires.tag.endswith("requires"):
898 for pkg in list(requires):
899 pkgname = pkg.attrib["name"]
900 if pkgname not in pkglist:
901 pkglist.append(pkgname)
# Return the package names listed by the comps <group> whose id or name
# equals *group*.  The pkglist init, the `id` element lookup and the return
# are elided from this listing.
905 def get_pkglist_in_comps(group, comps):
909 root = xmlparse(comps)
911 raise SyntaxError("%s syntax error." % comps)
913 for elm in root.getiterator("group"):
915 name = elm.find("name")
916 if id.text == group or name.text == group:
917 packagelist = elm.find("packagelist")
924 for require in elm.getiterator("packagereq"):
925 if require.tag.endswith("packagereq"):
926 pkgname = require.text
927 if pkgname not in pkglist:
928 pkglist.append(pkgname)
# True when `file(1)` reports *binary* as statically linked.  The return
# statements are elided from this listing.
932 def is_statically_linked(binary):
# NOTE(review): `dev_null` not closed; `file` shadows the builtin — confirm.
934 dev_null = os.open("/dev/null", os.O_WRONLY)
935 filecmd = find_binary_path("file")
936 args = [ filecmd, binary ]
937 file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
938 output = file.communicate()[0]
940 if output.find(", statically linked, ") > 0:
# Prepare *rootdir* for running ARM binaries on an x86 host: mount
# binfmt_misc, copy a statically linked qemu-arm into the chroot, disable
# SELinux, and (re)register the ARM binfmt handler.  Several lines
# (fd.close() calls, early-return branches) are elided from this listing.
944 def setup_qemu_emulator(rootdir, arch):
945 # mount binfmt_misc if it doesn't exist
946 if not os.path.exists("/proc/sys/fs/binfmt_misc"):
947 modprobecmd = find_binary_path("modprobe")
948 subprocess.call([modprobecmd, "binfmt_misc"])
949 if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
950 mountcmd = find_binary_path("mount")
951 subprocess.call([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])
953 # qemu_emulator is a special case, we can't use find_binary_path
954 # qemu emulator should be a statically-linked executable file
955 qemu_emulator = "/usr/bin/qemu-arm"
956 if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
957 qemu_emulator = "/usr/bin/qemu-arm-static"
958 if not os.path.exists(qemu_emulator):
959 raise CreatorError("Please install a statically-linked qemu-arm")
960 if not os.path.exists(rootdir + "/usr/bin"):
961 makedirs(rootdir + "/usr/bin")
962 shutil.copy(qemu_emulator, rootdir + qemu_emulator)
964 # disable selinux, selinux will block qemu emulator to run
965 if os.path.exists("/usr/sbin/setenforce"):
966 subprocess.call(["/usr/sbin/setenforce", "0"])
968 node = "/proc/sys/fs/binfmt_misc/arm"
969 if is_statically_linked(qemu_emulator) and os.path.exists(node):
972 # unregister it if it has been registered and is a dynamically-linked executable
973 if not is_statically_linked(qemu_emulator) and os.path.exists(node):
974 qemu_unregister_string = "-1\n"
975 fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
976 fd.write(qemu_unregister_string)
979 # register qemu emulator for interpreting other arch executable file
980 if not os.path.exists(node):
# The magic/mask pair matches 32-bit little-endian ARM ELF binaries.
981 qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
982 fd = open("/proc/sys/fs/binfmt_misc/register", "w")
983 fd.write(qemu_arm_string)
988 def create_release(config, destdir, name, outimages, release):
989 """ TODO: This functionality should really be in creator.py inside the
990 ImageCreator class. """
# Flatten VM-image subdirectories into destdir, copy the kickstart with
# @BUILD_ID@ substituted, rename iso/usbimg outputs to .img, and write a
# MANIFEST with md5sums.  Several lines (loop headers, fd reads/closes)
# are elided from this listing.
992 # For virtual machine images, we have a subdir for it, this is unnecessary
995 for i in range(len(outimages)):
997 if not os.path.isdir(file) and os.path.dirname(file) != destdir:
998 thatsubdir = os.path.dirname(file)
999 newfile = os.path.join(destdir, os.path.basename(file))
1000 shutil.move(file, newfile)
1001 outimages[i] = newfile
1003 shutil.rmtree(thatsubdir, ignore_errors = True)
1005 """ Create release directory and files """
# NOTE(review): os.system with interpolated paths is shell-injection
# prone; subprocess with a list (or shutil.copy) would be safer.
1006 os.system ("cp %s %s/%s.ks" % (config, destdir, name))
1007 # When building a release we want to make sure the .ks
1008 # file generates the same build even when --release= is not used.
1009 fd = open(config, "r")
1012 kscont = kscont.replace("@BUILD_ID@",release)
1013 fd = open("%s/%s.ks" % (destdir,name), "w")
1016 outimages.append("%s/%s.ks" % (destdir,name))
1018 # Using system + mv, because of * in filename.
1019 os.system ("mv %s/*-pkgs.txt %s/%s.packages" % (destdir, destdir, name))
1020 outimages.append("%s/%s.packages" % (destdir,name))
1022 d = os.listdir(destdir)
1024 if f.endswith(".iso"):
1025 ff = f.replace(".iso", ".img")
1026 os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
1027 outimages.append("%s/%s" %(destdir, ff))
1028 elif f.endswith(".usbimg"):
1029 ff = f.replace(".usbimg", ".img")
1030 os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
1031 outimages.append("%s/%s" %(destdir, ff))
1033 fd = open(destdir + "/MANIFEST", "w")
1034 d = os.listdir(destdir)
1038 if os.path.exists("/usr/bin/md5sum"):
1039 p = subprocess.Popen(["/usr/bin/md5sum", "-b", "%s/%s" %(destdir, f )],
1040 stdout=subprocess.PIPE)
1041 (md5sum, errorstr) = p.communicate()
1042 if p.returncode != 0:
1043 logging.warning("Can't generate md5sum for image %s/%s" %(destdir, f ))
1045 md5sum = md5sum.split(" ")[0]
1046 fd.write(md5sum+" "+f+"\n")
1048 outimages.append("%s/MANIFEST" % destdir)
1051 """ Update the file list. """
# Drop entries that no longer exist (e.g. files moved/renamed above).
1053 for file in outimages:
1054 if os.path.exists("%s" % file):
1055 updated_list.append(file)
# Print diagnostic info about the host: every /etc/*-release file,
# /etc/issue, and the running kernel version.  The fd reads/closes are
# elided from this listing.
1059 def get_local_distro():
1060 print "Local linux distribution:"
1061 for file in glob.glob("/etc/*-release"):
1062 fd = open(file, "r")
1066 if os.path.exists("/etc/issue"):
1067 fd = open("/etc/issue", "r")
1071 print "Local Kernel version: " + os.uname()[2]
def check_mic_installation(argv):
    """Abort when the running creator binary exists in both
    /usr/local/bin and /usr/bin (a broken double installation).

    Raises CreatorError if both copies are present; otherwise a no-op.
    """
    creator_name = os.path.basename(argv[0])
    local_copy = "/usr/local/bin/" + creator_name
    system_copy = "/usr/bin/" + creator_name
    if os.path.exists(local_copy) and os.path.exists(system_copy):
        raise CreatorError("There are two mic2 installations existing, this will result in some unpredictable errors, the reason is installation path of mic2 binary is different from installation path of mic2 source on debian-based distros, please remove one of them to ensure it can work normally.")
# Download the source rpm for every binary package in *pkgs* into
# instroot/usr/src/SRPMS, reusing rpms already present in the cache.
# Several lines (helper returns, counters, loop headers, print summary)
# are elided from this listing.
1079 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
# Keep only repos whose name ends in "-source".
1081 def get_source_repometadata(repometadata):
1083 for repo in repometadata:
1084 if repo["name"].endswith("-source"):
1085 src_repometadata.append(repo)
1086 if src_repometadata:
1087 return src_repometadata
# Strip "name-version-release.src.rpm" down to the source package name.
1090 def get_src_name(srpm):
1091 m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
1096 src_repometadata = get_source_repometadata(repometadata)
1098 if not src_repometadata:
1099 print "No source repo found"
# Index the cached .src.rpm files by source package name.
1105 for repo in src_repometadata:
1106 cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
1107 lpkgs_path += glob.glob(cachepath)
1109 for lpkg in lpkgs_path:
1110 lpkg_name = get_src_name(os.path.basename(lpkg))
1111 lpkgs_dict[lpkg_name] = lpkg
1112 localpkgs = lpkgs_dict.keys()
1115 destdir = instroot+'/usr/src/SRPMS'
1116 if not os.path.exists(destdir):
1117 os.makedirs(destdir)
# Resolve each binary package to its source package name (deduplicated).
1121 srcpkg_name = get_source_name(_pkg, repometadata)
1124 srcpkgset.add(srcpkg_name)
1126 for pkg in list(srcpkgset):
1127 if pkg in localpkgs:
1129 shutil.copy(lpkgs_dict[pkg], destdir)
1130 src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
1132 src_pkg = get_package(pkg, src_repometadata, 'src')
1134 shutil.copy(src_pkg, destdir)
1135 src_pkgs.append(src_pkg)
1136 print '--------------------------------------------------'
1137 print "%d source packages gotten from cache" %cached_count
1141 def add_optparser(arg):
1143 if not hasattr(f, "optparser"):