1 #
2 # misc.py : miscellaneous utilities
3 #
4 # Copyright 2010, 2011 Intel Inc.
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; version 2 of the License.
9 #
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
13 # GNU Library General Public License for more details.
14 #
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
18
19
20 import os
21 import sys
22 import subprocess
23 import tempfile
24 import re
25 import shutil
26 import glob
27 import hashlib
28 import urlparse
29
30 try:
31     import sqlite3 as sqlite
32 except ImportError:
33     import sqlite
34
35 try:
36     from xml.etree import cElementTree
37 except ImportError:
38     import cElementTree
39 xmlparse = cElementTree.parse
40
41 from errors import *
42 from fs_related import *
43
44 from mic import msger
45
46 def setlocale():
47     import locale
48     import codecs
49
50     try:
51         locale.setlocale(locale.LC_ALL,'')
52     except locale.Error:
53         os.environ['LC_ALL'] = 'C'
54         locale.setlocale(locale.LC_ALL,'C')
55     sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
56     sys.stdout.errors = 'replace'
57
58 def get_extension_name(path):
59     match = re.search("(?<=\.)\w+$", path)
60     if match:
61         return match.group(0)
62     else:
63         return None
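
# Illustrative examples for get_extension_name() (paths are made up); it only
# looks at the text after the last dot:
#
#   get_extension_name("/tmp/meego-netbook.usbimg")   ->  "usbimg"
#   get_extension_name("README")                      ->  None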
64
65 def get_image_type(path):
66     if os.path.isdir(path):
67         if ismeego(path):
68             return "fs"
69         return None
70     maptab = {"raw":"raw", "vmdk":"vmdk", "vdi":"vdi", "iso":"livecd", "usbimg":"liveusb"}
71     extension = get_extension_name(path)
72     if extension in ("raw", "vmdk", "vdi", "iso", "usbimg"):
73         return maptab[extension]
74
75     fd = open(path, "rb")
76     file_header = fd.read(1024)
77     fd.close()
78     vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
79     if file_header[0:len(vdi_flag)] == vdi_flag:
80         return maptab["vdi"]
81
82     dev_null = os.open("/dev/null", os.O_WRONLY)
83     filecmd = find_binary_path("file")
84     args = [ filecmd, path ]
85     file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
86     output = file.communicate()[0]
87     os.close(dev_null)
88     isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
89     usbimgptn = re.compile(r".*x86 boot sector.*active.*")
90     rawptn = re.compile(r".*x86 boot sector.*")
91     vmdkptn = re.compile(r".*VMware. disk image.*")
92     ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
93     if isoptn.match(output):
94         return maptab["iso"]
95     elif usbimgptn.match(output):
96         return maptab["usbimg"]
97     elif rawptn.match(output):
98         return maptab["raw"]
99     elif vmdkptn.match(output):
100         return maptab["vmdk"]
101     elif ext3fsimgptn.match(output):
102         return "ext3fsimg"
103     else:
104         return None
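
# Illustrative examples (paths are made up); known extensions are mapped
# through maptab, everything else falls back to file(1) sniffing:
#
#   get_image_type("/tmp/meego-netbook.usbimg")   ->  "liveusb"
#   get_image_type("/tmp/meego-handset.raw")      ->  "raw"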
105
106 def get_file_size(file):
107     """Return size in MB unit"""
108     du = find_binary_path("du")
109     dev_null = os.open("/dev/null", os.O_WRONLY)
110     duProc = subprocess.Popen([du, "-s", "-b", "-B", "1M", file],
111                                stdout=subprocess.PIPE, stderr=dev_null)
112     duOutput = duProc.communicate()[0]
113     if duProc.returncode:
114         raise CreatorError("Failed to run %s" % du)
115
116     size1 = int(duOutput.split()[0])
117     duProc = subprocess.Popen([du, "-s", "-B", "1M", file],
118                                stdout=subprocess.PIPE, stderr=dev_null)
119     duOutput = duProc.communicate()[0]
120     if duProc.returncode:
121         raise CreatorError("Failed to run %s" % du)
122
123     size2 = int(duOutput.split()[0])
124     os.close(dev_null)
125     if size1 > size2:
126         return size1
127     else:
128         return size2
129
130 def get_filesystem_avail(fs):
131     vfstat = os.statvfs(fs)
132     return vfstat.f_bavail * vfstat.f_bsize
133
134 def convert_image(srcimg, srcfmt, dstimg, dstfmt):
135     #convert disk format
136     if dstfmt != "raw":
137         raise CreatorError("Invalid destination image format: %s" % dstfmt)
138     msger.debug("converting %s image to %s" % (srcimg, dstimg))
139     if srcfmt == "vmdk":
140         path = find_binary_path("qemu-img")
141         argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt,  dstimg]
142     elif srcfmt == "vdi":
143         path = find_binary_path("VBoxManage")
144         argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
145     else:
        raise CreatorError("Invalid source image format: %s" % srcfmt)
147
148     rc = subprocess.call(argv)
149     if rc == 0:
150         msger.debug("convert successful")
    else:
152         raise CreatorError("Unable to convert disk to %s" % dstfmt)
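
# Illustrative call (paths are made up); only "raw" is accepted as the
# destination format:
#
#   convert_image("/tmp/meego-ivi.vmdk", "vmdk", "/tmp/meego-ivi.raw", "raw")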
153
154 def myxcopytree(src, dst):
155     dev_null = os.open("/dev/null", os.O_WRONLY)
156     dirnames = os.listdir(src)
157     copycmd = find_binary_path("cp")
158     for dir in dirnames:
159         args = [ copycmd, "-af", src + "/" + dir, dst ]
160         subprocess.call(args, stdout=dev_null, stderr=dev_null)
161     os.close(dev_null)
162     ignores = ["dev/fd", "dev/stdin", "dev/stdout", "dev/stderr", "etc/mtab"]
163     for exclude in ignores:
164         if os.path.exists(dst + "/" + exclude):
165             os.unlink(dst + "/" + exclude)
166
167 def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress the file system from a squashfs image"""
169     unsquashfs = find_binary_path("unsquashfs")
170     args = [ unsquashfs, "-d", outdir, squashfsimg ]
171     rc = subprocess.call(args)
172     if (rc != 0):
173         raise SquashfsError("Failed to uncompress %s." % squashfsimg)
174
175 def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
176     makedirs(dir)
177     return tempfile.mkdtemp(dir = dir, prefix = prefix)
178
179 def ismeego(rootdir):
180     ret = False
181     if (os.path.exists(rootdir + "/etc/moblin-release") \
182        or os.path.exists(rootdir + "/etc/meego-release")) \
183        and os.path.exists(rootdir + "/etc/inittab") \
184        and os.path.exists(rootdir + "/etc/rc.sysinit") \
185        and glob.glob(rootdir + "/boot/vmlinuz-*"):
186         ret = True
187
188     return ret
189
190
191 def is_meego_bootstrap(rootdir):
192     ret = False
193     if (os.path.exists(rootdir + "/etc/moblin-release") \
194        or os.path.exists(rootdir + "/etc/meego-release")) \
195        and os.path.exists(rootdir + "/usr/bin/python") \
196        and os.path.exists(rootdir + "/usr/bin/mic-image-creator"):
197         ret = True
198
199     return ret
200
201 _my_proxies = {}
202 _my_noproxy = None
203 _my_noproxy_list = []
204
205 def set_proxy_environ():
206     global _my_noproxy, _my_proxies
207     if not _my_proxies:
208         return
209     for key in _my_proxies.keys():
210         os.environ[key + "_proxy"] = _my_proxies[key]
211     if not _my_noproxy:
212         return
213     os.environ["no_proxy"] = _my_noproxy
214
215 def unset_proxy_environ():
    for name in ("http_proxy", "https_proxy", "ftp_proxy", "all_proxy", "no_proxy",
                 "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY", "ALL_PROXY", "NO_PROXY"):
        if name in os.environ:
            del os.environ[name]
236
237 def _set_proxies(proxy = None, no_proxy = None):
    """Fill the module-level scheme -> proxy URL map and the no_proxy value."""
239     global _my_noproxy, _my_proxies
240     _my_proxies = {}
241     _my_noproxy = None
242     proxies = []
243     if proxy:
244        proxies.append(("http_proxy", proxy))
245     if no_proxy:
246        proxies.append(("no_proxy", no_proxy))
247
248     """Get proxy settings from environment variables if not provided"""
249     if not proxy and not no_proxy:
250        proxies = os.environ.items()
251
252        """ Remove proxy env variables, urllib2 can't handle them correctly """
253        unset_proxy_environ()
254
255     for name, value in proxies:
256         name = name.lower()
257         if value and name[-6:] == '_proxy':
258             if name[0:2] != "no":
259                 _my_proxies[name[:-6]] = value
260             else:
261                 _my_noproxy = value
262
263 def ip_to_int(ip):
264     ipint=0
265     shift=24
266     for dec in ip.split("."):
267         ipint |= int(dec) << shift
268         shift -= 8
269     return ipint
270
271 def int_to_ip(val):
272     ipaddr=""
273     shift=0
274     for i in range(4):
275         dec = val >> shift
276         dec &= 0xff
277         ipaddr = ".%d%s" % (dec, ipaddr)
278         shift += 8
279     return ipaddr[1:]
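
# Worked example for the two converters above:
#
#   ip_to_int("192.168.1.1")   ->  3232235777  (0xC0A80101)
#   int_to_ip(3232235777)      ->  "192.168.1.1"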
280
281 def isip(host):
282     if host.replace(".", "").isdigit():
283         return True
284     return False
285
286 def set_noproxy_list():
287     global _my_noproxy, _my_noproxy_list
288     _my_noproxy_list = []
289     if not _my_noproxy:
290         return
291     for item in _my_noproxy.split(","):
292         item = item.strip()
293         if not item:
294             continue
295         if item[0] != '.' and item.find("/") == -1:
296             """ Need to match it """
297             _my_noproxy_list.append({"match":0,"needle":item})
298         elif item[0] == '.':
299             """ Need to match at tail """
300             _my_noproxy_list.append({"match":1,"needle":item})
301         elif item.find("/") > 3:
302             """ IP/MASK, need to match at head """
303             needle = item[0:item.find("/")].strip()
304             ip = ip_to_int(needle)
305             netmask = 0
306             mask = item[item.find("/")+1:].strip()
307
308             if mask.isdigit():
309                 netmask = int(mask)
310                 netmask = ~((1<<(32-netmask)) - 1)
311                 ip &= netmask
312             else:
313                 shift=24
314                 netmask=0
315                 for dec in mask.split("."):
316                     netmask |= int(dec) << shift
317                     shift -= 8
318                 ip &= netmask
319             _my_noproxy_list.append({"match":2,"needle":ip,"netmask":netmask})
320
321 def isnoproxy(url):
322     (scheme, host, path, parm, query, frag) = urlparse.urlparse(url)
323     if '@' in host:
324         user_pass, host = host.split('@', 1)
325     if ':' in host:
326         host, port = host.split(':', 1)
327     hostisip = isip(host)
328     for item in _my_noproxy_list:
329         if hostisip and item["match"] <= 1:
330             continue
331         if item["match"] == 2 and hostisip:
332             if (ip_to_int(host) & item["netmask"]) == item["needle"]:
333                 return True
334         if item["match"] == 0:
335             if host == item["needle"]:
336                 return True
337         if item["match"] == 1:
            if host.endswith(item["needle"]):
339                 return True
340     return False
341
342 def set_proxies(proxy = None, no_proxy = None):
343     _set_proxies(proxy, no_proxy)
344     set_noproxy_list()
345
346 def get_proxy(url):
347     if url[0:4] == "file" or isnoproxy(url):
348         return None
    scheme = url[0:url.index(":")]
    if scheme in _my_proxies:
        return _my_proxies[scheme]
    elif "http" in _my_proxies:
        return _my_proxies["http"]
    return None
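
# Typical usage of the proxy helpers (all values are made up):
#
#   set_proxies(proxy="http://proxy.example.com:8080",
#               no_proxy="localhost,.intranet.example.com")
#   get_proxy("http://repo.example.com/core/packages/foo.rpm")
#       ->  "http://proxy.example.com:8080"
#   get_proxy("file:///srv/local-repo/foo.rpm")
#       ->  None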
358
359 def get_temp_reponame(baseurl):
360     md5obj = hashlib.md5(baseurl)
361     tmpreponame = "%s" % md5obj.hexdigest()
362     return tmpreponame
363
364 def get_repostrs_from_ks(ks):
365     kickstart_repos = []
366     for repodata in ks.handler.repo.repoList:
367         repostr = ""
368         if hasattr(repodata, "name") and repodata.name:
369             repostr += ",name:" + repodata.name
370         if hasattr(repodata, "baseurl") and repodata.baseurl:
371             repostr += ",baseurl:" + repodata.baseurl
372         if hasattr(repodata, "mirrorlist") and repodata.mirrorlist:
373             repostr += ",mirrorlist:" + repodata.mirrorlist
374         if hasattr(repodata, "includepkgs") and repodata.includepkgs:
375             repostr += ",includepkgs:" + ";".join(repodata.includepkgs)
376         if hasattr(repodata, "excludepkgs") and repodata.excludepkgs:
377             repostr += ",excludepkgs:" + ";".join(repodata.excludepkgs)
378         if hasattr(repodata, "cost") and repodata.cost:
379             repostr += ",cost:%d" % repodata.cost
380         if hasattr(repodata, "save") and repodata.save:
381             repostr += ",save:"
382         if hasattr(repodata, "proxy") and repodata.proxy:
383             repostr += ",proxy:" + repodata.proxy
        if hasattr(repodata, "proxy_username") and repodata.proxy_username:
            repostr += ",proxyuser:" + repodata.proxy_username
        if hasattr(repodata, "proxy_password") and repodata.proxy_password:
            repostr += ",proxypasswd:" + repodata.proxy_password
        if repostr.find("name:") == -1:
            repostr += ",name:%s" % get_temp_reponame(repodata.baseurl)
        if hasattr(repodata, "debuginfo") and repodata.debuginfo:
            repostr += ",debuginfo:"
        if hasattr(repodata, "source") and repodata.source:
            repostr += ",source:"
        if hasattr(repodata, "gpgkey") and repodata.gpgkey:
            repostr += ",gpgkey:" + repodata.gpgkey
396         kickstart_repos.append(repostr[1:])
397     return kickstart_repos
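
# Each string produced above is a comma-separated list of key:value fields,
# e.g. (made-up values):
#
#   "name:meego-core,baseurl:http://repo.example.com/core,gpgkey:http://repo.example.com/RPM-GPG-KEY-meego"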
398
399 def get_uncompressed_data_from_url(url, filename, proxies):
400     filename = myurlgrab(url, filename, proxies)
401     suffix = None
402     if filename.endswith(".gz"):
403         suffix = ".gz"
404         gunzip = find_binary_path('gunzip')
405         subprocess.call([gunzip, "-f", filename])
406     elif filename.endswith(".bz2"):
407         suffix = ".bz2"
408         bunzip2 = find_binary_path('bunzip2')
409         subprocess.call([bunzip2, "-f", filename])
410     if suffix:
411         filename = filename.replace(suffix, "")
412     return filename
413
414 def get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename):
415     url = str(baseurl + "/" + filename)
416     filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
417     return get_uncompressed_data_from_url(url,filename_tmp,proxies)
418
419 def get_metadata_from_repos(repostrs, cachedir):
420     my_repo_metadata = []
421     for repostr in repostrs:
422         reponame = None
423         baseurl = None
424         proxy = None
425         items = repostr.split(",")
426         for item in items:
427             subitems = item.split(":")
428             if subitems[0] == "name":
429                 reponame = subitems[1]
430             if subitems[0] == "baseurl":
431                 baseurl = item[8:]
432             if subitems[0] == "proxy":
433                 proxy = item[6:]
434             if subitems[0] in ("http", "https", "ftp", "ftps", "file"):
435                 baseurl = item
436         if not proxy:
437             proxy = get_proxy(baseurl)
438         proxies = None
439         if proxy:
440            proxies = {str(proxy.split(":")[0]):str(proxy)}
441         makedirs(cachedir + "/" + reponame)
442         url = str(baseurl + "/repodata/repomd.xml")
443         filename = str("%s/%s/repomd.xml" % (cachedir, reponame))
444         repomd = myurlgrab(url, filename, proxies)
445         try:
446             root = xmlparse(repomd)
447         except SyntaxError:
448             raise CreatorError("repomd.xml syntax error.")
449
450         ns = root.getroot().tag
451         ns = ns[0:ns.rindex("}")+1]
452
453         patterns = None
454         for elm in root.getiterator("%sdata" % ns):
455             if elm.attrib["type"] == "patterns":
456                 patterns = elm.find("%slocation" % ns).attrib['href']
457                 break
458
459         comps = None
460         for elm in root.getiterator("%sdata" % ns):
461             if elm.attrib["type"] == "group_gz":
462                 comps = elm.find("%slocation" % ns).attrib['href']
463                 break
464         if not comps:
465             for elm in root.getiterator("%sdata" % ns):
466                 if elm.attrib["type"] == "group":
467                     comps = elm.find("%slocation" % ns).attrib['href']
468                     break
469
470         primary_type = None
471         for elm in root.getiterator("%sdata" % ns):
472             if elm.attrib["type"] == "primary_db":
473                 primary_type=".sqlite"
474                 break
475
476         if not primary_type:
477             for elm in root.getiterator("%sdata" % ns):
478                 if elm.attrib["type"] == "primary":
479                     primary_type=".xml"
480                     break
481
482         if not primary_type:
483             continue
484
485         primary = elm.find("%slocation" % ns).attrib['href']
486         primary = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, primary)
487
488         if patterns:
489             patterns = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, patterns)
490
491         if comps:
492             comps = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, comps)
493
494         """ Get repo key """
495         try:
496             repokey = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, "repodata/repomd.xml.key")
497         except CreatorError:
498             repokey = None
499             msger.warning("can't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))
500
        my_repo_metadata.append({"name": reponame,
                                 "baseurl": baseurl,
                                 "repomd": repomd,
                                 "primary": primary,
                                 "cachedir": cachedir,
                                 "proxies": proxies,
                                 "patterns": patterns,
                                 "comps": comps,
                                 "repokey": repokey})
502
503     return my_repo_metadata
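
# Sketch of the usual call chain (ks and cachedir are supplied by the caller):
#
#   repostrs     = get_repostrs_from_ks(ks)
#   repometadata = get_metadata_from_repos(repostrs, cachedir)
#   archlist     = get_arch(repometadata)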
504
505 def get_arch(repometadata):
506     archlist = []
507     for repo in repometadata:
508         if repo["primary"].endswith(".xml"):
509             root = xmlparse(repo["primary"])
510             ns = root.getroot().tag
511             ns = ns[0:ns.rindex("}")+1]
512             for elm in root.getiterator("%spackage" % ns):
513                 if elm.find("%sarch" % ns).text not in ("noarch", "src"):
514                     arch = elm.find("%sarch" % ns).text
515                     if arch not in archlist:
516                         archlist.append(arch)
517         elif repo["primary"].endswith(".sqlite"):
518             con = sqlite.connect(repo["primary"])
519             for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
520                 if row[0] not in archlist:
521                     archlist.append(row[0])
522
523             con.close()
524     return archlist
525
526 def get_package(pkg, repometadata, arch = None):
527     ver = ""
528     target_repo = None
529     for repo in repometadata:
530         if repo["primary"].endswith(".xml"):
531             root = xmlparse(repo["primary"])
532             ns = root.getroot().tag
533             ns = ns[0:ns.rindex("}")+1]
534             for elm in root.getiterator("%spackage" % ns):
535                 if elm.find("%sname" % ns).text == pkg:
536                     if elm.find("%sarch" % ns).text != "src":
537                         version = elm.find("%sversion" % ns)
538                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
539                         if tmpver > ver:
540                             ver = tmpver
541                             location = elm.find("%slocation" % ns)
542                             pkgpath = "%s" % location.attrib['href']
543                             target_repo = repo
544                         break
545         if repo["primary"].endswith(".sqlite"):
546             con = sqlite.connect(repo["primary"])
            if not arch:
                for row in con.execute("select version, release, location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                for row in con.execute("select version, release, location_href from packages where name = \"%s\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
561             con.close()
562     if target_repo:
563         makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
564         url = str(target_repo["baseurl"] + "/" + pkgpath)
565         filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
566         pkg = myurlgrab(url, filename, target_repo["proxies"])
567         return pkg
568     else:
569         return None
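
# Illustrative call: fetch the newest "bash" rpm found in the parsed repos and
# return its local path (or None if no repo carries it):
#
#   rpmfile = get_package("bash", repometadata)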
570
571 def get_source_name(pkg, repometadata):
572
573     def get_bin_name(pkg):
574         m = re.match("(.*)-(.*)-(.*)\.(.*)\.rpm", pkg)
575         if m:
576             return m.group(1)
577         return None
578
579     def get_src_name(srpm):
580         m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
581         if m:
582             return m.group(1)
583         return None
584
585     ver = ""
586     target_repo = None
587
588     pkg_name = get_bin_name(pkg)
589     if not pkg_name:
590         return None
591
592     for repo in repometadata:
593         if repo["primary"].endswith(".xml"):
594             root = xmlparse(repo["primary"])
595             ns = root.getroot().tag
596             ns = ns[0:ns.rindex("}")+1]
597             for elm in root.getiterator("%spackage" % ns):
598                 if elm.find("%sname" % ns).text == pkg_name:
599                     if elm.find("%sarch" % ns).text != "src":
600                         version = elm.find("%sversion" % ns)
601                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
602                         if tmpver > ver:
603                             ver = tmpver
604                             fmt = elm.find("%sformat" % ns)
                            if fmt is not None:
606                                 fns = fmt.getchildren()[0].tag
607                                 fns = fns[0:fns.rindex("}")+1]
608                                 pkgpath = fmt.find("%ssourcerpm" % fns).text
609                                 target_repo = repo
610                         break
611
612         if repo["primary"].endswith(".sqlite"):
613             con = sqlite.connect(repo["primary"])
614             for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
615                 tmpver = "%s-%s" % (row[0], row[1])
616                 if tmpver > ver:
617                     pkgpath = "%s" % row[2]
618                     target_repo = repo
619                 break
620             con.close()
621     if target_repo:
622         return get_src_name(pkgpath)
623     else:
624         return None
625
626 def get_release_no(repometadata, distro="meego"):
627     cpio = find_binary_path("cpio")
628     rpm2cpio = find_binary_path("rpm2cpio")
629     release_pkg = get_package("%s-release" % distro, repometadata)
630     if release_pkg:
631         tmpdir = mkdtemp()
632         oldcwd = os.getcwd()
633         os.chdir(tmpdir)
634         p1 = subprocess.Popen([rpm2cpio, release_pkg], stdout = subprocess.PIPE)
635         p2 = subprocess.Popen([cpio, "-idv"], stdin = p1.stdout, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
636         p2.communicate()
637         f = open("%s/etc/%s-release" % (tmpdir, distro), "r")
638         content = f.read()
639         f.close()
640         os.chdir(oldcwd)
641         shutil.rmtree(tmpdir, ignore_errors = True)
642         return content.split(" ")[2]
643     else:
644         return "UNKNOWN"
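
# Assuming the release package ships an /etc/meego-release line such as
# "MeeGo release 1.1 (MeeGo)", the third whitespace-separated field is returned:
#
#   get_release_no(repometadata)   ->  "1.1"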
645
646 def get_kickstarts_from_repos(repometadata):
647     kickstarts = []
648     for repo in repometadata:
649         try:
650             root = xmlparse(repo["repomd"])
651         except SyntaxError:
652             raise CreatorError("repomd.xml syntax error.")
653
654         ns = root.getroot().tag
655         ns = ns[0:ns.rindex("}")+1]
656
657         for elm in root.getiterator("%sdata" % ns):
658             if elm.attrib["type"] == "image-config":
659                 break
660
661         if elm.attrib["type"] != "image-config":
662             continue
663
        location = elm.find("%slocation" % ns)
        image_config = str(repo["baseurl"] + "/" + location.attrib["href"])
        # keep any compression suffix so the cached copy can be uncompressed after download
        suffix = os.path.splitext(location.attrib["href"])[1]
        if suffix not in (".gz", ".bz2"):
            suffix = ""
        filename = str("%s/%s/image-config.xml%s" % (repo["cachedir"], repo["name"], suffix))

        image_config = get_uncompressed_data_from_url(image_config, filename, repo["proxies"])
669
670         try:
671             root = xmlparse(image_config)
672         except SyntaxError:
673             raise CreatorError("image-config.xml syntax error.")
674
675         for elm in root.getiterator("config"):
676             path = elm.find("path").text
677             path = path.replace("images-config", "image-config")
678             description = elm.find("description").text
679             makedirs(os.path.dirname("%s/%s/%s" % (repo["cachedir"], repo["name"], path)))
680             url = path
681             if "http" not in path:
682                 url = str(repo["baseurl"] + "/" + path)
683             filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], path))
684             path = myurlgrab(url, filename, repo["proxies"])
685             kickstarts.append({"filename":path,"description":description})
    return kickstarts
687
688 def select_ks(ksfiles):
689     msger.info("Available kickstart files:")
690     i = 0
691     for ks in ksfiles:
692         i += 1
693         msger.raw("\t%d. %s (%s)" % (i, ks["description"], os.path.basename(ks["filename"])))
694
695     while True:
696         choice = raw_input("Please input your choice and press ENTER. [1..%d] ? " % i)
697         if choice.lower() == "q":
698             sys.exit(1)
699         if choice.isdigit():
700             choice = int(choice)
701             if choice >= 1 and choice <= i:
702                 break
703
704     return ksfiles[choice-1]["filename"]
705
706 def get_pkglist_in_patterns(group, patterns):
707     found = False
708     pkglist = []
709     try:
710         root = xmlparse(patterns)
711     except SyntaxError:
712         raise SyntaxError("%s syntax error." % patterns)
713
714     for elm in list(root.getroot()):
715         ns = elm.tag
716         ns = ns[0:ns.rindex("}")+1]
717         name = elm.find("%sname" % ns)
718         summary = elm.find("%ssummary" % ns)
719         if name.text == group or summary.text == group:
720             found = True
721             break
722
723     if not found:
724         return pkglist
725
726     found = False
727     for requires in list(elm):
728         if requires.tag.endswith("requires"):
729             found = True
730             break
731
732     if not found:
733         return pkglist
734
735     for pkg in list(requires):
736         pkgname = pkg.attrib["name"]
737         if pkgname not in pkglist:
738             pkglist.append(pkgname)
739
740     return pkglist
741
742 def get_pkglist_in_comps(group, comps):
743     found = False
744     pkglist = []
745     try:
746         root = xmlparse(comps)
747     except SyntaxError:
748         raise SyntaxError("%s syntax error." % comps)
749
750     for elm in root.getiterator("group"):
751         id = elm.find("id")
752         name = elm.find("name")
753         if id.text == group or name.text == group:
754             packagelist = elm.find("packagelist")
755             found = True
756             break
757
758     if not found:
759         return pkglist
760
    for require in elm.getiterator("packagereq"):
        pkgname = require.text
        if pkgname not in pkglist:
            pkglist.append(pkgname)
766
767     return pkglist
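
# Illustrative usage of the two group helpers above ("meego-core" is a made-up
# group id; repo is one entry of the list returned by get_metadata_from_repos):
#
#   pkgs = get_pkglist_in_comps("meego-core", repo["comps"])
#   pkgs = get_pkglist_in_patterns("meego-core", repo["patterns"])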
768
769 def is_statically_linked(binary):
770     ret = False
771     dev_null = os.open("/dev/null", os.O_WRONLY)
772     filecmd = find_binary_path("file")
773     args = [ filecmd, binary ]
774     file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
775     output = file.communicate()[0]
776     os.close(dev_null)
777     if output.find(", statically linked, ") > 0:
778         ret = True
779     return ret
780
781 def setup_qemu_emulator(rootdir, arch):
782     # mount binfmt_misc if it doesn't exist
783     if not os.path.exists("/proc/sys/fs/binfmt_misc"):
784         modprobecmd = find_binary_path("modprobe")
785         subprocess.call([modprobecmd, "binfmt_misc"])
786     if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
787         mountcmd = find_binary_path("mount")
788         subprocess.call([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])
789
790     # qemu_emulator is a special case, we can't use find_binary_path
791     # qemu emulator should be a statically-linked executable file
792     qemu_emulator = "/usr/bin/qemu-arm"
793     if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
794         qemu_emulator = "/usr/bin/qemu-arm-static"
795     if not os.path.exists(qemu_emulator):
796         raise CreatorError("Please install a statically-linked qemu-arm")
797     if not os.path.exists(rootdir + "/usr/bin"):
798         makedirs(rootdir + "/usr/bin")
799     shutil.copy(qemu_emulator, rootdir + qemu_emulator)
800
801     # disable selinux, selinux will block qemu emulator to run
802     if os.path.exists("/usr/sbin/setenforce"):
803         subprocess.call(["/usr/sbin/setenforce", "0"])
804
805     node = "/proc/sys/fs/binfmt_misc/arm"
806     if is_statically_linked(qemu_emulator) and os.path.exists(node):
807         return qemu_emulator
808
809     # unregister it if it has been registered and is a dynamically-linked executable
810     if not is_statically_linked(qemu_emulator) and os.path.exists(node):
811         qemu_unregister_string = "-1\n"
812         fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
813         fd.write(qemu_unregister_string)
814         fd.close()
815
816     # register qemu emulator for interpreting other arch executable file
817     if not os.path.exists(node):
818         qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
819         fd = open("/proc/sys/fs/binfmt_misc/register", "w")
820         fd.write(qemu_arm_string)
821         fd.close()
822
823     return qemu_emulator
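
# A quick way to double-check the binfmt_misc registration afterwards
# (illustrative, run as root):
#
#   open("/proc/sys/fs/binfmt_misc/arm").read()   # first line reads "enabled" once registered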
824
825 def create_release(config, destdir, name, outimages, release):
826     """ TODO: This functionality should really be in creator.py inside the
827     ImageCreator class. """
828
829     # For virtual machine images, we have a subdir for it, this is unnecessary
830     # for release
831     thatsubdir = None
832     for i in range(len(outimages)):
833         file = outimages[i]
834         if not os.path.isdir(file) and os.path.dirname(file) != destdir:
835             thatsubdir = os.path.dirname(file)
836             newfile = os.path.join(destdir, os.path.basename(file))
837             shutil.move(file, newfile)
838             outimages[i] = newfile
839     if thatsubdir:
840         shutil.rmtree(thatsubdir, ignore_errors = True)
841
842     """ Create release directory and files """
843     os.system ("cp %s %s/%s.ks" % (config, destdir, name))
844     # When building a release we want to make sure the .ks
845     # file generates the same build even when --release= is not used.
846     fd = open(config, "r")
847     kscont = fd.read()
848     fd.close()
849     kscont = kscont.replace("@BUILD_ID@",release)
850     fd = open("%s/%s.ks" % (destdir,name), "w")
851     fd.write(kscont)
852     fd.close()
853     outimages.append("%s/%s.ks" % (destdir,name))
854
855     # Using system + mv, because of * in filename.
856     os.system ("mv %s/*-pkgs.txt %s/%s.packages" % (destdir, destdir, name))
857     outimages.append("%s/%s.packages" % (destdir,name))
858
859     d = os.listdir(destdir)
860     for f in d:
861         if f.endswith(".iso"):
862             ff = f.replace(".iso", ".img")
863             os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
864             outimages.append("%s/%s" %(destdir, ff))
865         elif f.endswith(".usbimg"):
866             ff = f.replace(".usbimg", ".img")
867             os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
868             outimages.append("%s/%s" %(destdir, ff))
869
870     fd = open(destdir + "/MANIFEST", "w")
871     d = os.listdir(destdir)
872     for f in d:
873         if f == "MANIFEST":
874             continue
875         if os.path.exists("/usr/bin/md5sum"):
876             p = subprocess.Popen(["/usr/bin/md5sum", "-b", "%s/%s" %(destdir, f )],
877                              stdout=subprocess.PIPE)
878             (md5sum, errorstr) = p.communicate()
879             if p.returncode != 0:
880                 msger.warning("Can't generate md5sum for image %s/%s" %(destdir, f ))
881             else:
882                 md5sum = md5sum.split(" ")[0]
883                 fd.write(md5sum+" "+f+"\n")
884
885     outimages.append("%s/MANIFEST" % destdir)
886     fd.close()
887
888     """ Update the file list. """
889     updated_list = []
890     for file in outimages:
891         if os.path.exists("%s" % file):
892             updated_list.append(file)
893
894     return updated_list
895
896 def get_local_distro():
897     msger.info("Local linux distribution:")
898     for file in glob.glob("/etc/*-release"):
899         fd = open(file, "r")
900         content = fd.read()
901         fd.close()
902         msger.info(content)
903     if os.path.exists("/etc/issue"):
904         fd = open("/etc/issue", "r")
905         content = fd.read()
906         fd.close()
907         msger.info(content)
908
909     msger.info("Local Kernel version: " + os.uname()[2])
910
911 def check_mic_installation(argv):
912     creator_name = os.path.basename(argv[0])
913     if os.path.exists("/usr/local/bin/" + creator_name) \
914         and os.path.exists("/usr/bin/" + creator_name):
        raise CreatorError("Two mic2 installations were found (in /usr/bin and /usr/local/bin). "
                           "This leads to unpredictable errors, because on Debian-based distros "
                           "the mic2 binary and the mic2 source are installed under different "
                           "prefixes. Please remove one of the installations.")
916
917 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
918
919     def get_source_repometadata(repometadata):
920         src_repometadata=[]
921         for repo in repometadata:
922             if repo["name"].endswith("-source"):
923                 src_repometadata.append(repo)
924         if src_repometadata:
925             return src_repometadata
926         return None
927
928     def get_src_name(srpm):
929         m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
930         if m:
931             return m.group(1)
932         return None
933
934     src_repometadata = get_source_repometadata(repometadata)
935
936     if not src_repometadata:
937         msger.warning("No source repo found")
938         return None
939
940     src_pkgs = []
941     lpkgs_dict = {}
942     lpkgs_path = []
943     for repo in src_repometadata:
944         cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
945         lpkgs_path += glob.glob(cachepath)
946
947     for lpkg in lpkgs_path:
948         lpkg_name = get_src_name(os.path.basename(lpkg))
949         lpkgs_dict[lpkg_name] = lpkg
950     localpkgs = lpkgs_dict.keys()
951
952     cached_count = 0
953     destdir = instroot+'/usr/src/SRPMS'
954     if not os.path.exists(destdir):
955         os.makedirs(destdir)
956
957     srcpkgset = set()
958     for _pkg in pkgs:
959         srcpkg_name = get_source_name(_pkg, repometadata)
960         if not srcpkg_name:
961             return None
962         srcpkgset.add(srcpkg_name)
963
964     for pkg in list(srcpkgset):
965         if pkg in localpkgs:
966             cached_count += 1
967             shutil.copy(lpkgs_dict[pkg], destdir)
968             src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
969         else:
970             src_pkg = get_package(pkg, src_repometadata, 'src')
971             if src_pkg:
972                 shutil.copy(src_pkg, destdir)
973                 src_pkgs.append(src_pkg)
    msger.info("%d source packages were taken from the local cache" % cached_count)
975
976     return src_pkgs
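
# Illustrative call (the rpm filename is made up; the other arguments come from
# the image creator):
#
#   srpms = SrcpkgsDownload(["bash-4.1-1.2.i586.rpm"], repometadata, instroot, cachedir)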