Updated chroot's cleanup function
[tools/mic.git] / micng / utils / misc.py
1 #
2 # misc.py : miscellaneous utilities
3 #
4 # Copyright 2010, Intel Inc.
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; version 2 of the License.
9 #
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
13 # GNU Library General Public License for more details.
14 #
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
18
19
20 import os
21 import sys
22 import subprocess
23 import logging
24 import tempfile
25 import re
26 import shutil
27 import glob
28 import xml.dom.minidom
29 import hashlib
30 import urlparse
31 import locale
32 import codecs
33
34 try:
35     import sqlite3 as sqlite
36 except ImportError:
37     import sqlite
38 import _sqlitecache
39
40 try:
41     from xml.etree import cElementTree
42 except ImportError:
43     import cElementTree
44 xmlparse = cElementTree.parse
45
46 from errors import *
47 from fs_related import *
48
# File-lock state shared with the chroot cleanup code: the lock file's
# descriptor (-1 while unlocked) and its path.
chroot_lockfd = -1
chroot_lock = ""
51
def setlocale():
    """Initialize the process locale (falling back to 'C') and wrap
    sys.stdout so unencodable characters are replaced, not fatal."""
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error:
        # The configured locale is unusable; force the portable default.
        os.environ['LC_ALL'] = 'C'
        locale.setlocale(locale.LC_ALL, 'C')
    writer = codecs.getwriter(locale.getpreferredencoding())
    sys.stdout = writer(sys.stdout)
    sys.stdout.errors = 'replace'
60
def get_extension_name(path):
    """Return the final filename extension of path (the word after the
    last dot), or None when there is no extension.

    e.g. "disk.img.vdi" -> "vdi", "README" -> None.
    """
    # Raw string so the regex does not depend on '\.' surviving normal
    # string escaping.
    match = re.search(r"(?<=\.)\w+$", path)
    if match:
        return match.group(0)
    return None
67
def get_image_type(path):
    """Guess the image type of path.

    Directories are reported as "fs" when they look like a MeeGo root
    (see ismeego).  Files are classified by extension first, then by
    content: a VirtualBox VDI header, then the output of file(1).
    Returns None when the type cannot be determined.
    """
    if os.path.isdir(path):
        if ismeego(path):
            return "fs"
        return None

    maptab = {"raw":"raw", "vmdk":"vmdk", "vdi":"vdi", "iso":"livecd", "usbimg":"liveusb"}
    extension = get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    # Extension did not help -- sniff the first KB for a VDI signature.
    fd = open(path, "rb")
    try:
        file_header = fd.read(1024)
    finally:
        fd.close()
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    # Fall back to file(1) and pattern-match its description.  `fileproc`
    # replaces a local that shadowed the builtin `file`; the /dev/null fd
    # is now closed even if Popen raises.
    dev_null = os.open("/dev/null", os.O_WRONLY)
    try:
        filecmd = find_binary_path("file")
        args = [ filecmd, path ]
        fileproc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
        output = fileproc.communicate()[0]
    finally:
        os.close(dev_null)
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    else:
        return None
108
def get_file_size(file):
    """Return the size of file (or directory tree) in MB.

    du(1) is run twice -- once with -b (apparent size) and once without
    -- and the larger result is returned.  Raises CreatorError when du
    fails.
    """
    du = find_binary_path("du")
    dev_null = os.open("/dev/null", os.O_WRONLY)
    try:
        duProc = subprocess.Popen([du, "-s", "-b", "-B", "1M", file],
                                   stdout=subprocess.PIPE, stderr=dev_null)
        duOutput = duProc.communicate()[0]
        if duProc.returncode:
            raise CreatorError("Failed to run %s" % du)
        size1 = int(duOutput.split()[0])

        duProc = subprocess.Popen([du, "-s", "-B", "1M", file],
                                   stdout=subprocess.PIPE, stderr=dev_null)
        duOutput = duProc.communicate()[0]
        if duProc.returncode:
            raise CreatorError("Failed to run %s" % du)
        size2 = int(duOutput.split()[0])
    finally:
        # The original leaked the /dev/null descriptor when du failed.
        os.close(dev_null)
    return max(size1, size2)
132
def get_filesystem_avail(fs):
    """Return the bytes available to unprivileged users on the
    filesystem containing fs."""
    stats = os.statvfs(fs)
    return stats.f_bsize * stats.f_bavail
136
def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    """Convert a vmdk or vdi disk image (srcimg) into a raw image at
    dstimg.

    Raises CreatorError for unsupported formats or when the external
    converter exits non-zero.
    """
    # Only raw output is supported.
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    logging.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt,  dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        # Message typo ("soure") fixed.
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = subprocess.call(argv)
    if rc == 0:
        logging.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)
156
def myxcopytree(src, dst):
    """Copy the contents of src into dst with `cp -af`, then remove
    entries that must not leak out of a chroot (dev fds, mtab)."""
    dev_null = os.open("/dev/null", os.O_WRONLY)
    try:
        copycmd = find_binary_path("cp")
        # `entry` replaces a local that shadowed the builtin `dir`.
        for entry in os.listdir(src):
            args = [ copycmd, "-af", os.path.join(src, entry), dst ]
            subprocess.call(args, stdout=dev_null, stderr=dev_null)
    finally:
        os.close(dev_null)
    ignores = ["dev/fd", "dev/stdin", "dev/stdout", "dev/stderr", "etc/mtab"]
    for exclude in ignores:
        if os.path.exists(os.path.join(dst, exclude)):
            os.unlink(os.path.join(dst, exclude))
169
def uncompress_squashfs(squashfsimg, outdir):
    """Extract a squashfs image into outdir; raise SquashfsError when
    unsquashfs exits non-zero."""
    unsquashfs = find_binary_path("unsquashfs")
    rc = subprocess.call([ unsquashfs, "-d", outdir, squashfsimg ])
    if rc != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)
177
def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """Create and return a fresh temporary directory under dir,
    creating dir itself first if necessary."""
    makedirs(dir)
    return tempfile.mkdtemp(prefix = prefix, dir = dir)
181
def ismeego(rootdir):
    """Heuristically decide whether rootdir is a MeeGo/Moblin root
    filesystem: a release file plus inittab, rc.sysinit and a kernel."""
    release_found = (os.path.exists(rootdir + "/etc/moblin-release")
                     or os.path.exists(rootdir + "/etc/meego-release"))
    return bool(release_found
                and os.path.exists(rootdir + "/etc/inittab")
                and os.path.exists(rootdir + "/etc/rc.sysinit")
                and glob.glob(rootdir + "/boot/vmlinuz-*"))
192
193
def is_meego_bootstrap(rootdir):
    """Heuristically decide whether rootdir is a MeeGo bootstrap
    environment: a release file plus python and mic-image-creator."""
    release_found = (os.path.exists(rootdir + "/etc/moblin-release")
                     or os.path.exists(rootdir + "/etc/meego-release"))
    return bool(release_found
                and os.path.exists(rootdir + "/usr/bin/python")
                and os.path.exists(rootdir + "/usr/bin/mic-image-creator"))
203
204
# Module-level proxy state, managed by set_proxies()/_set_proxies():
_my_proxies = {}       # scheme -> proxy URL
_my_noproxy = None     # raw no_proxy string, or None
_my_noproxy_list = []  # parsed match rules; see set_noproxy_list()
208
def set_proxy_environ():
    """Export the proxies recorded in _my_proxies (and the no_proxy
    string) into the environment for child processes."""
    global _my_noproxy, _my_proxies
    if not _my_proxies:
        return
    for scheme, url in _my_proxies.items():
        os.environ[scheme + "_proxy"] = url
    if _my_noproxy:
        os.environ["no_proxy"] = _my_noproxy
218
def unset_proxy_environ():
    """Remove every proxy-related variable from os.environ.

    urllib2 mishandles proxy environment variables (see _set_proxies),
    so they are stripped before we manage proxies ourselves.
    """
    for name in ("http_proxy", "https_proxy", "ftp_proxy", "all_proxy",
                 "no_proxy"):
        # Drop both lower- and upper-case spellings; pop() tolerates
        # variables that are not set.  Replaces a 20-line has_key chain.
        os.environ.pop(name, None)
        os.environ.pop(name.upper(), None)
240
def _set_proxies(proxy = None, no_proxy = None):
    """Populate the module-level proxy tables.

    Explicit arguments win; otherwise settings are harvested from the
    environment, which is then scrubbed because urllib2 cannot handle
    proxy variables correctly.
    """
    global _my_noproxy, _my_proxies
    _my_proxies = {}
    _my_noproxy = None

    if proxy or no_proxy:
        candidates = []
        if proxy:
            candidates.append(("http_proxy", proxy))
        if no_proxy:
            candidates.append(("no_proxy", no_proxy))
    else:
        candidates = os.environ.items()
        # Remove proxy env variables, urllib2 can't handle them correctly.
        unset_proxy_environ()

    for name, value in candidates:
        name = name.lower()
        if not value or name[-6:] != '_proxy':
            continue
        if name[0:2] == "no":
            _my_noproxy = value
        else:
            _my_proxies[name[:-6]] = value
266
def ip_to_int(ip):
    """Convert a dotted-quad IPv4 string to its 32-bit integer value."""
    result = 0
    shift = 24
    for octet in ip.split("."):
        result |= int(octet) << shift
        shift -= 8
    return result
274
def int_to_ip(val):
    """Convert a 32-bit integer to dotted-quad IPv4 notation."""
    octets = [str((val >> shift) & 0xff) for shift in (24, 16, 8, 0)]
    return ".".join(octets)
284
def isip(host):
    """Crude test: True when host is made up solely of digits and dots
    (i.e. looks like a numeric IPv4 address, not a hostname)."""
    return host.replace(".", "").isdigit()
289
def set_noproxy_list():
    """Parse the comma-separated _my_noproxy string into _my_noproxy_list.

    Each entry is a dict with a "match" code used by isnoproxy():
      0 - exact hostname match ("needle" is a bare name)
      1 - tail match ("needle" starts with '.')
      2 - network match ("needle" is the masked network as an int,
          "netmask" the mask as an int)
    """
    global _my_noproxy, _my_noproxy_list
    _my_noproxy_list = []
    if not _my_noproxy:
        return
    for item in _my_noproxy.split(","):
        item = item.strip()
        if not item:
            continue
        if item[0] != '.' and item.find("/") == -1:
            """ Need to match it """
            _my_noproxy_list.append({"match":0,"needle":item})
        elif item[0] == '.':
            """ Need to match at tail """
            _my_noproxy_list.append({"match":1,"needle":item})
        elif item.find("/") > 3:
            """ IP/MASK, need to match at head """
            needle = item[0:item.find("/")].strip()
            ip = ip_to_int(needle)
            netmask = 0
            mask = item[item.find("/")+1:].strip()

            if mask.isdigit():
                # CIDR prefix length, e.g. 10.0.0.0/8.  Python's ~ gives a
                # negative int, but & with the positive ip still yields the
                # correctly masked non-negative network address.
                netmask = int(mask)
                netmask = ~((1<<(32-netmask)) - 1)
                ip &= netmask
            else:
                # Dotted-quad netmask, e.g. 10.0.0.0/255.0.0.0.
                shift=24
                netmask=0
                for dec in mask.split("."):
                    netmask |= int(dec) << shift
                    shift -= 8
                ip &= netmask
            _my_noproxy_list.append({"match":2,"needle":ip,"netmask":netmask})
324
def isnoproxy(url):
    """Return True when url's host matches an entry in _my_noproxy_list
    (exact name, domain tail, or IP network)."""
    (scheme, host, path, parm, query, frag) = urlparse.urlparse(url)
    # Strip credentials and port before matching.
    if '@' in host:
        user_pass, host = host.split('@', 1)
    if ':' in host:
        host, port = host.split(':', 1)
    hostisip = isip(host)
    for item in _my_noproxy_list:
        if hostisip and item["match"] <= 1:
            # Name-based rules cannot match a bare IP address.
            continue
        if item["match"] == 2 and hostisip:
            if (ip_to_int(host) & item["netmask"]) == item["needle"]:
                return True
        if item["match"] == 0:
            if host == item["needle"]:
                return True
        if item["match"] == 1:
            # BUGFIX: the original used host.rfind(needle) > 0, which also
            # matched the needle in the *middle* of the host (e.g.
            # "a.example.com.evil.org" matched ".example.com", bypassing
            # the proxy).  A tail rule must anchor at the end of the host.
            if host.endswith(item["needle"]):
                return True
    return False
345
def set_proxies(proxy = None, no_proxy = None):
    """Install proxy configuration: record the proxy map, then compile
    the no-proxy rule list used by isnoproxy()."""
    _set_proxies(proxy, no_proxy)
    set_noproxy_list()
349
def get_proxy(url):
    """Return the proxy URL to use for url, or None.

    file:// URLs and hosts on the no-proxy list get no proxy; otherwise
    the proxy registered for the URL's scheme is used, falling back to
    the http proxy.
    """
    if url[0:4] == "file" or isnoproxy(url):
        return None
    # `scheme` replaces a local that shadowed the builtin `type`; the
    # dict.get chain replaces Python-2-only has_key() calls.
    scheme = url[0:url.index(":")]
    return _my_proxies.get(scheme, _my_proxies.get("http"))
362
def remap_repostr(repostr, siteconf):
    """Rewrite repostr's baseurl when an enabled site-config section
    declares (via "equalto") that it replaces this repo's name or URL."""
    name = None
    baseurl = None
    for piece in repostr.split(","):
        fields = piece.split(":")
        if fields[0] == "name":
            name = fields[1]
        if fields[0] == "baseurl":
            baseurl = piece[8:]
    if not baseurl:
        # No explicit baseurl key: treat the whole string as the URL.
        baseurl = repostr

    for section in siteconf._sections:
        if section == "main":
            continue
        if not siteconf.has_option(section, "enabled") or siteconf.get(section, "enabled") == "0":
            continue
        if not siteconf.has_option(section, "equalto"):
            continue
        equalto = siteconf.get(section, "equalto")
        if (name and equalto == name) or (baseurl and equalto == baseurl):
            return repostr.replace(baseurl, siteconf.get(section, "baseurl"))
    return repostr
388
389
def get_temp_reponame(baseurl):
    """Derive a deterministic repository name from its base URL (the
    md5 hex digest of the URL)."""
    return hashlib.md5(baseurl).hexdigest()
394
def get_repostr(repo, siteconf = None):
    """Convert a comma-separated repo description into a kickstart-style
    "repo --key=value ..." line.

    repo is either a bare URL (optionally prefixed "baseurl:") or a list
    of "key:value" pairs drawn from `keys` below.  When siteconf is
    given the baseurl may be remapped first (see remap_repostr).  A
    proxy and a deterministic --name are appended when absent.  Raises
    ValueError on malformed input.
    """
    if siteconf:
        repo = remap_repostr(repo, siteconf)
    keys = ("baseurl", "mirrorlist", "name", "cost", "includepkgs", "excludepkgs", "proxy", "save", "proxyuser", "proxypasswd", "debuginfo", "source", "gpgkey")
    repostr = "repo"
    items = repo.split(",")
    if len(items) == 1:
        # Single item: a bare URL, possibly written as "baseurl:URL".
        subitems = items[0].split(":")
        if len(subitems) == 1:
            url = subitems[0]
            repostr += " --baseurl=%s" % url
        elif subitems[0] == "baseurl":
            url = items[0][8:]
            repostr += " --baseurl=%s" % url
        elif subitems[0] in ("http", "ftp", "https", "ftps", "file"):
            url = items[0]
            repostr += " --baseurl=%s" % url
        else:
            raise ValueError("Invalid repo string")
        if url.find("://") == -1 \
           or url[0:url.index("://")] not in ("http", "ftp", "https", "ftps", "file") \
           or url.find("/", url.index("://")+3) == -1:
            raise ValueError("Invalid repo string")
    else:
        # Multiple key:value items; baseurl or mirrorlist is mandatory.
        if repo.find("baseurl:") == -1 and repo.find("mirrorlist:") == -1:
            raise ValueError("Invalid repo string")
        url = None
        for item in items:
            if not item:
                continue
            subitems = item.split(":")
            if subitems[0] in keys:
                if subitems[0] in ("baseurl", "mirrorlist"):
                    url = item[len(subitems[0])+1:]
                if subitems[0] in ("save", "debuginfo", "source"):
                    # Flag options take no value.
                    repostr += " --%s" % subitems[0]
                elif subitems[0] in ("includepkgs", "excludepkgs"):
                    # Package lists use ';' internally; kickstart wants ','.
                    repostr += " --%s=%s" % (subitems[0], item[len(subitems[0])+1:].replace(";", ","))
                else:
                    repostr += " --%s=%s" % (subitems[0], item[len(subitems[0])+1:])
            else:
                raise ValueError("Invalid repo string")
    # Validate the URL and add a proxy unless one was given explicitly.
    if url.find("://") != -1 \
       and url[0:url.index("://")] in ("http", "ftp", "https", "ftps", "file") \
       and url.find("/", url.index("://")+3) != -1:
        if repostr.find("--proxy=") == -1:
            proxy = get_proxy(url)
            if proxy:
                repostr += " --proxy=%s" % proxy
    else:
        raise ValueError("Invalid repo string")

    if repostr.find("--name=") == -1:
        # Fall back to a deterministic name derived from the URL.
        repostr += " --name=%s" % get_temp_reponame(url)

    return repostr
451
# Site-config locations: system-wide default, then a per-user override.
DEFAULT_SITECONF_GLOBAL="/etc/mic2/mic2.conf"
DEFAULT_SITECONF_USER="~/.mic2.conf"
454
def read_siteconf(siteconf = None):
    """Load site configuration.

    With an explicit path only that file is read; otherwise the global
    config is read first and the per-user config layered on top.
    Returns the parser, or None when nothing was loaded.
    """
    from ConfigParser import SafeConfigParser

    parser = SafeConfigParser()
    if siteconf:
        parser.read(siteconf)
    else:
        for conf in (DEFAULT_SITECONF_GLOBAL,
                     os.path.expanduser(DEFAULT_SITECONF_USER)):
            if os.path.isfile(conf):
                parser.read(conf)

    if parser.sections():
        return parser
    return None
474
475 def output_siteconf(siteconf):
476     output = ""
477     if not siteconf:
478         return output
479
480     for section in siteconf.sections():
481         output += "[%s]\n" % section
482         for option in siteconf.options(section):
483             output += "%s=%s\n" % (option, siteconf.get(section, option))
484         output += "\n\n"
485
486     print output
487     return output
488
def get_repostrs_from_ks(ks):
    """Serialize every repo in a kickstart handler into the
    comma-separated "key:value,..." format consumed by get_repostr()."""
    kickstart_repos = []
    for repodata in ks.handler.repo.repoList:
        repostr = ""
        if hasattr(repodata, "name") and repodata.name:
            repostr += ",name:" + repodata.name
        if hasattr(repodata, "baseurl") and repodata.baseurl:
            repostr += ",baseurl:" + repodata.baseurl
        if hasattr(repodata, "mirrorlist") and repodata.mirrorlist:
            repostr += ",mirrorlist:" + repodata.mirrorlist
        if hasattr(repodata, "includepkgs") and repodata.includepkgs:
            # Lists are ';'-separated inside the repo string.
            repostr += ",includepkgs:" + ";".join(repodata.includepkgs)
        if hasattr(repodata, "excludepkgs") and repodata.excludepkgs:
            repostr += ",excludepkgs:" + ";".join(repodata.excludepkgs)
        if hasattr(repodata, "cost") and repodata.cost:
            repostr += ",cost:%d" % repodata.cost
        if hasattr(repodata, "save") and repodata.save:
            repostr += ",save:"
        if hasattr(repodata, "proxy") and repodata.proxy:
            repostr += ",proxy:" + repodata.proxy
        if hasattr(repodata, "proxyuser") and repodata.proxy_username:
            repostr += ",proxyuser:" + repodata.proxy_username
        if hasattr(repodata, "proxypasswd") and repodata.proxy_password:
            repostr += ",proxypasswd:" + repodata.proxy_password
        if repostr.find("name:") == -1:
            # BUGFIX: this used plain assignment, discarding the baseurl
            # and every other option collected above whenever the repo
            # had no explicit name.  Append the generated name instead.
            repostr += ",name:%s" % get_temp_reponame(repodata.baseurl)
        if hasattr(repodata, "debuginfo") and repodata.debuginfo:
            repostr += ",debuginfo:"
        if hasattr(repodata, "source") and repodata.source:
            repostr += ",source:"
        if hasattr(repodata, "gpgkey") and repodata.gpgkey:
            repostr += ",gpgkey:" + repodata.gpgkey
        kickstart_repos.append(repostr[1:])
    return kickstart_repos
523
def get_repostrs_from_siteconf(siteconf):
    """Serialize enabled, non-aliasing site-config repo sections into the
    comma-separated "key:value,..." format consumed by get_repostr()."""
    site_repos = []
    if not siteconf:
        return site_repos

    for section in siteconf._sections:
        if section != "main":
            repostr = ""
            # Only sections that are enabled and do not merely remap
            # another repo (no "equalto") are emitted.
            if siteconf.has_option(section, "enabled") \
               and siteconf.get(section, "enabled") == "1" \
               and (not siteconf.has_option(section, "equalto") or not siteconf.get(section, "equalto")):
                if siteconf.has_option(section, "name") and siteconf.get(section, "name"):
                    repostr += ",name:%s" % siteconf.get(section, "name")
                if siteconf.has_option(section, "baseurl") and siteconf.get(section, "baseurl"):
                    repostr += ",baseurl:%s" % siteconf.get(section, "baseurl")
                if siteconf.has_option(section, "mirrorlist") and siteconf.get(section, "mirrorlist"):
                    repostr += ",mirrorlist:%s" % siteconf.get(section, "mirrorlist")
                if siteconf.has_option(section, "includepkgs") and siteconf.get(section, "includepkgs"):
                    repostr += ",includepkgs:%s" % siteconf.get(section, "includepkgs").replace(",", ";")
                if siteconf.has_option(section, "excludepkgs") and siteconf.get(section, "excludepkgs"):
                    repostr += ",excludepkgs:%s" % siteconf.get(section, "excludepkgs").replace(",", ";")
                if siteconf.has_option(section, "cost") and siteconf.get(section, "cost"):
                    repostr += ",cost:%s" % siteconf.get(section, "cost")
                if siteconf.has_option(section, "save") and siteconf.get(section, "save"):
                    repostr += ",save:"
                if siteconf.has_option(section, "proxy") and siteconf.get(section, "proxy"):
                    repostr += ",proxy:%s" % siteconf.get(section, "proxy")
                if siteconf.has_option(section, "proxy_username") and siteconf.get(section, "proxy_username"):
                    repostr += ",proxyuser:%s" % siteconf.get(section, "proxy_username")
                if siteconf.has_option(section, "proxy_password") and siteconf.get(section, "proxy_password"):
                    repostr += ",proxypasswd:%s" % siteconf.get(section, "proxy_password")
            if repostr != "":
                if repostr.find("name:") == -1:
                    # BUGFIX: the original called get_temp_reponame() with
                    # no argument (a TypeError at runtime) and replaced,
                    # rather than appended to, the accumulated string.
                    # Derive a deterministic name from the collected
                    # options and append it.
                    repostr += ",name:%s" % get_temp_reponame(repostr)
                site_repos.append(repostr[1:])
    return site_repos
560
def get_uncompressed_data_from_url(url, filename, proxies):
    """Download url to filename, decompress it in place when it is gzip
    or bzip2 compressed, and return the resulting local path."""
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        gunzip = find_binary_path('gunzip')
        subprocess.call([gunzip, "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        bunzip2 = find_binary_path('bunzip2')
        subprocess.call([bunzip2, "-f", filename])
    if suffix:
        # BUGFIX: str.replace() stripped the suffix anywhere in the path
        # (e.g. "a.gz.d/f.gz" -> "a.d/f"); only the trailing suffix must
        # be removed, matching what gunzip/bunzip2 produce.
        filename = filename[:-len(suffix)]
    return filename
575
def get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename):
    """Fetch one metadata file from a repo into this repo's cache
    directory, uncompressing it; return the local path."""
    url = str("%s/%s" % (baseurl, filename))
    target = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    return get_uncompressed_data_from_url(url, target, proxies)
580
581 def get_metadata_from_repos(repostrs, cachedir):
582     if not cachedir:
583         CreatorError("No cache dir defined.")
584
585     my_repo_metadata = []
586     for repostr in repostrs:
587         reponame = None
588         baseurl = None
589         proxy = None
590         items = repostr.split(",")
591         for item in items:
592             subitems = item.split(":")
593             if subitems[0] == "name":
594                 reponame = subitems[1]
595             if subitems[0] == "baseurl":
596                 baseurl = item[8:]
597             if subitems[0] == "proxy":
598                 proxy = item[6:]
599             if subitems[0] in ("http", "https", "ftp", "ftps", "file"):
600                 baseurl = item
601         if not proxy:
602             proxy = get_proxy(baseurl)
603         proxies = None
604         if proxy:
605            proxies = {str(proxy.split(":")[0]):str(proxy)}
606         makedirs(cachedir + "/" + reponame)
607         url = str(baseurl + "/repodata/repomd.xml")
608         filename = str("%s/%s/repomd.xml" % (cachedir, reponame))
609         repomd = myurlgrab(url, filename, proxies)
610         try:
611             root = xmlparse(repomd)
612         except SyntaxError:
613             raise CreatorError("repomd.xml syntax error.")
614
615         ns = root.getroot().tag
616         ns = ns[0:ns.rindex("}")+1]
617
618         patterns = None
619         for elm in root.getiterator("%sdata" % ns):
620             if elm.attrib["type"] == "patterns":
621                 patterns = elm.find("%slocation" % ns).attrib['href']
622                 break
623
624         comps = None
625         for elm in root.getiterator("%sdata" % ns):
626             if elm.attrib["type"] == "group_gz":
627                 comps = elm.find("%slocation" % ns).attrib['href']
628                 break
629         if not comps:
630             for elm in root.getiterator("%sdata" % ns):
631                 if elm.attrib["type"] == "group":
632                     comps = elm.find("%slocation" % ns).attrib['href']
633                     break
634
635         primary_type = None
636         for elm in root.getiterator("%sdata" % ns):
637             if elm.attrib["type"] == "primary_db":
638                 primary_type=".sqlite"
639                 break
640
641         if not primary_type:
642             for elm in root.getiterator("%sdata" % ns):
643                 if elm.attrib["type"] == "primary":
644                     primary_type=".xml"
645                     break
646
647         if not primary_type:
648             continue
649
650         primary = elm.find("%slocation" % ns).attrib['href']
651         primary = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, primary)
652
653         if patterns:
654             patterns = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, patterns)
655
656         if comps:
657             comps = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, comps)
658
659         """ Get repo key """
660         try:
661             repokey = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, "repodata/repomd.xml.key")
662         except CreatorError:
663             repokey = None
664             print "Warning: can't get %s/%s" % (baseurl, "repodata/repomd.xml.key")
665
666         my_repo_metadata.append({"name":reponame, "baseurl":baseurl, "repomd":repomd, "primary":primary, "cachedir":cachedir, "proxies":proxies, "patterns":patterns, "comps":comps, "repokey":repokey})
667     return my_repo_metadata
668
def get_arch(repometadata):
    """Collect the distinct binary architectures (excluding noarch/src)
    present in the given repos' primary metadata."""
    archlist = []
    for repo in repometadata:
        primary = repo["primary"]
        if primary.endswith(".xml"):
            root = xmlparse(primary)
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                arch = elm.find("%sarch" % ns).text
                if arch not in ("noarch", "src") and arch not in archlist:
                    archlist.append(arch)
        elif primary.endswith(".sqlite"):
            con = sqlite.connect(primary)
            for row in con.execute('select arch from packages where arch not in ("src", "noarch")'):
                if row[0] not in archlist:
                    archlist.append(row[0])
            con.close()
    return archlist
689
690
def get_package(pkg, repometadata, arch = None):
    """Find pkg in the repos' primary metadata, download it into the
    owning repo's package cache and return the local path (None when
    not found).

    The XML branch tracks the highest "version-release" string seen.
    NOTE(review): the sqlite branches never update `ver` and `break`
    unconditionally after the first row, so version selection is
    effectively disabled there -- looks like a latent bug, kept as-is.
    NOTE(review): `arch` only toggles the "exclude src" filter; it is
    never compared against a package's architecture.
    """
    ver = ""
    target_repo = None
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        # String comparison, not rpm version ordering.
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if not arch:
                for row in con.execute("select version, release,location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                for row in con.execute("select version, release,location_href from packages where name = \"%s\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        # Cache the rpm under <cachedir>/<repo>/packages/ and fetch it.
        makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
        url = str(target_repo["baseurl"] + "/" + pkgpath)
        filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        pkg = myurlgrab(url, filename, target_repo["proxies"])
        return pkg
    else:
        return None
735
def get_source_name(pkg, repometadata):
    """Map a binary rpm filename to the name of its source package by
    looking up the sourcerpm field in the repos' primary metadata.
    Returns None when the name cannot be determined."""

    def get_bin_name(pkg):
        # "name-version-release.arch.rpm" -> "name"
        m = re.match("(.*)-(.*)-(.*)\.(.*)\.rpm", pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        # "name-version-release.src.rpm" -> "name"
        m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        # String comparison, not rpm version ordering.
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            # NOTE(review): `if fmt:` tests Element truthiness
                            # (False for a childless element), not "is None".
                            if fmt:
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            # NOTE(review): `ver` is never updated here and the break is
            # unconditional, so only the first row is considered.
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None
790
def get_release_no(repometadata, distro="meego"):
    """Return the release number string of a distro.

    Downloads the <distro>-release package via the repo metadata, unpacks
    it with ``rpm2cpio | cpio`` into a scratch directory and reads
    etc/<distro>-release from the payload.

    Returns the third space-separated token of that file (matches the
    historical behavior -- may IndexError on an unexpected format), or
    "UNKNOWN" when the release package cannot be located.
    """
    cpio = find_binary_path("cpio")
    rpm2cpio = find_binary_path("rpm2cpio")
    release_pkg = get_package("%s-release" % distro, repometadata)
    if not release_pkg:
        return "UNKNOWN"

    tmpdir = mkdtemp()
    oldcwd = os.getcwd()
    try:
        os.chdir(tmpdir)
        # Unpack the rpm payload: rpm2cpio <pkg> | cpio -idv
        p1 = subprocess.Popen([rpm2cpio, release_pkg], stdout = subprocess.PIPE)
        p2 = subprocess.Popen([cpio, "-idv"], stdin = p1.stdout,
                              stdout = subprocess.PIPE, stderr = subprocess.PIPE)
        p2.communicate()
        # Close our copy of the pipe so p1 can get SIGPIPE / not leak an fd.
        p1.stdout.close()
        f = open("%s/etc/%s-release" % (tmpdir, distro), "r")
        try:
            content = f.read()
        finally:
            f.close()
        # Assumes the release string's third field is the version number
        # (unchanged from the original implementation).
        return content.split(" ")[2]
    finally:
        # Always restore the working directory and remove the scratch dir,
        # even when unpacking or reading fails.
        os.chdir(oldcwd)
        shutil.rmtree(tmpdir, ignore_errors = True)
810
def get_kickstarts_from_repos(repometadata):
    """Collect kickstart files advertised through a repo's image-config data.

    Looks for a repomd <data type="image-config"> entry, downloads and parses
    the referenced image-config.xml, then fetches every kickstart it lists
    into the repo's cache directory.

    Returns a list of {"filename": ..., "description": ...} dicts.

    NOTE(review): latent issues in this function:
      * `suffix` (used below) is not defined in this function or at module
        scope -- reaching that line raises NameError; presumably it was
        meant to carry the compression extension of the href.  Confirm
        against the original mic2 implementation before fixing.
      * `elm` is used after the first for loop; a repo with no <data>
        elements at all would leave it unbound (NameError).
      * the final `return kickstarts` sits INSIDE the repo loop, so only
        the first repo carrying an image-config is ever processed --
        confirm whether that is intentional.
    """
    kickstarts = []
    for repo in repometadata:
        try:
            root = xmlparse(repo["repomd"])
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        # Extract the XML namespace prefix, e.g. "{http://...}repomd" -> "{http://...}"
        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        # Find the image-config entry; `elm` deliberately leaks out of the loop.
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "image-config":
                break

        if elm.attrib["type"] != "image-config":
            continue

        location = elm.find("%slocation" % ns)
        image_config = str(repo["baseurl"] + "/" + location.attrib["href"])
        # NOTE(review): `suffix` is undefined here (NameError) -- see docstring.
        filename = str("%s/%s/image-config.xml%s" % (repo["cachedir"], repo["name"], suffix))

        image_config = get_uncompressed_data_from_url(image_config,filename,repo["proxies"])

        try:
            root = xmlparse(image_config)
        except SyntaxError:
            raise CreatorError("image-config.xml syntax error.")

        # Download each kickstart listed in the image-config.
        for elm in root.getiterator("config"):
            path = elm.find("path").text
            path = path.replace("images-config", "image-config")
            description = elm.find("description").text
            makedirs(os.path.dirname("%s/%s/%s" % (repo["cachedir"], repo["name"], path)))
            url = path
            if "http" not in path:
                url = str(repo["baseurl"] + "/" + path)
            filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], path))
            path = myurlgrab(url, filename, repo["proxies"])
            kickstarts.append({"filename":path,"description":description})
        # NOTE(review): returns inside the repo loop -- see docstring.
        return kickstarts
852
853 def select_ks(ksfiles):
854     print "Available kickstart files:"
855     i = 0
856     for ks in ksfiles:
857         i += 1
858         print "\t%d. %s (%s)" % (i, ks["description"], os.path.basename(ks["filename"]))
859     while True:
860         choice = raw_input("Please input your choice and press ENTER. [1..%d] ? " % i)
861         if choice.lower() == "q":
862             sys.exit(1)
863         if choice.isdigit():
864             choice = int(choice)
865             if choice >= 1 and choice <= i:
866                 break
867
868     return ksfiles[choice-1]["filename"]
869
870
def get_pkglist_in_patterns(group, patterns):
    """Return the package names required by a pattern group.

    Parses a (suse-style, namespaced) patterns XML document, locates the
    pattern whose <name> or <summary> text equals `group`, and returns the
    unique "name" attributes of its requires entries in document order.

    Raises SyntaxError when the patterns document is not well-formed XML.
    Returns an empty list when the group or its requires section is absent.
    """
    try:
        tree = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    # Locate the pattern element matching the requested group.
    matched = None
    for pattern in list(tree.getroot()):
        # Namespace prefix, e.g. "{http://...}pattern" -> "{http://...}"
        nsprefix = pattern.tag[0:pattern.tag.rindex("}")+1]
        pat_name = pattern.find("%sname" % nsprefix)
        pat_summary = pattern.find("%ssummary" % nsprefix)
        if pat_name.text == group or pat_summary.text == group:
            matched = pattern
            break

    if matched is None:
        return []

    # Locate its requires child, if any.
    requires_elm = None
    for child in list(matched):
        if child.tag.endswith("requires"):
            requires_elm = child
            break

    if requires_elm is None:
        return []

    # Collect unique package names, preserving document order.
    names = []
    for entry in list(requires_elm):
        pkgname = entry.attrib["name"]
        if pkgname not in names:
            names.append(pkgname)

    return names
906
def get_pkglist_in_comps(group, comps):
    """Return the package names listed for a comps group.

    Parses a yum comps XML document, finds the <group> whose <id> or <name>
    text equals `group`, and returns the unique <packagereq> texts from its
    <packagelist>, in document order.

    Raises SyntaxError when the comps document is not well-formed XML.
    Returns an empty list when the group (or its packagelist) is not found.
    """
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    # Locate the requested group.  iter() replaces the deprecated
    # getiterator() (removed in Python 3.9; iter() exists since
    # ElementTree 1.3 / Python 2.7).  `gid` avoids shadowing builtin `id`.
    packagelist = None
    for grp in root.iter("group"):
        gid = grp.find("id")
        gname = grp.find("name")
        if gid.text == group or gname.text == group:
            packagelist = grp.find("packagelist")
            break

    if packagelist is None:
        return pkglist

    # Iterate the located <packagelist> (the old code found it but then
    # scanned the whole group, and could hit an unbound `pkgname`).
    for req in packagelist:
        if req.tag.endswith("packagereq") and req.text not in pkglist:
            pkglist.append(req.text)

    return pkglist
933
def is_statically_linked(binary):
    """Return True when file(1) reports `binary` as statically linked.

    Runs the `file` command on the given path and checks its output for
    the ", statically linked, " marker.
    """
    dev_null = os.open("/dev/null", os.O_WRONLY)
    try:
        filecmd = find_binary_path("file")
        # Renamed from `file` -- the old name shadowed the builtin.
        proc = subprocess.Popen([filecmd, binary],
                                stdout=subprocess.PIPE, stderr=dev_null)
        output = proc.communicate()[0]
    finally:
        # Close the /dev/null fd even if Popen/communicate raises.
        os.close(dev_null)
    return output.find(", statically linked, ") > 0
945
def setup_qemu_emulator(rootdir, arch):
    """Install a statically-linked qemu-arm into the chroot at `rootdir` and
    register it with the kernel's binfmt_misc so ARM binaries run on the host.

    Requires root privileges (mounts binfmt_misc, writes to /proc, may call
    setenforce).  Returns the path of the qemu emulator that was installed.

    NOTE(review): the `arch` parameter is not used -- qemu-arm is always
    assumed; confirm whether other architectures were ever intended.
    """
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        subprocess.call([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        subprocess.call([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")
    # Copy the emulator into the chroot at the same absolute path that will
    # be registered with binfmt_misc below.
    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        subprocess.call(["/usr/sbin/setenforce", "0"])

    # If a static emulator is already registered, nothing more to do.
    node = "/proc/sys/fs/binfmt_misc/arm"
    if is_statically_linked(qemu_emulator) and os.path.exists(node):
        return qemu_emulator

    # unregister it if it has been registered and is a dynamically-linked executable
    if not is_statically_linked(qemu_emulator) and os.path.exists(node):
        qemu_unregister_string = "-1\n"
        fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
        fd.write(qemu_unregister_string)
        fd.close()

    # register qemu emulator for interpreting other arch executable file
    # (the magic/mask below match the ELF header of 32-bit ARM binaries;
    # binfmt_misc itself expands the \xNN escapes in the written string)
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
        fd.write(qemu_arm_string)
        fd.close()

    return qemu_emulator
989
def create_release(config, destdir, name, outimages, release):
    """ TODO: This functionality should really be in creator.py inside the
    ImageCreator class. """
    # Assembles a release directory: flattens image files into destdir,
    # publishes the kickstart (with @BUILD_ID@ substituted by `release`)
    # and package list, renames .iso/.usbimg outputs to .img, and writes
    # an md5sum MANIFEST.  Returns the updated list of existing files.

    # For virtual machine images, we have a subdir for it, this is unnecessary
    # for release
    thatsubdir = None
    for i in range(len(outimages)):
        file = outimages[i]  # NOTE(review): shadows the builtin `file`
        if not os.path.isdir(file) and os.path.dirname(file) != destdir:
            thatsubdir = os.path.dirname(file)
            newfile = os.path.join(destdir, os.path.basename(file))
            shutil.move(file, newfile)
            outimages[i] = newfile
    if thatsubdir:
        shutil.rmtree(thatsubdir, ignore_errors = True)

    """ Create release directory and files """
    os.system ("cp %s %s/%s.ks" % (config, destdir, name))
    # When building a release we want to make sure the .ks
    # file generates the same build even when --release= is not used.
    fd = open(config, "r")
    kscont = fd.read()
    fd.close()
    # Pin the release id into the published kickstart copy.
    kscont = kscont.replace("@BUILD_ID@",release)
    fd = open("%s/%s.ks" % (destdir,name), "w")
    fd.write(kscont)
    fd.close()
    outimages.append("%s/%s.ks" % (destdir,name))

    # Using system + mv, because of * in filename.
    os.system ("mv %s/*-pkgs.txt %s/%s.packages" % (destdir, destdir, name))
    outimages.append("%s/%s.packages" % (destdir,name))

    # Publish iso/usbimg outputs under the generic .img extension.
    d = os.listdir(destdir)
    for f in d:
        if f.endswith(".iso"):
            ff = f.replace(".iso", ".img")
            os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
            outimages.append("%s/%s" %(destdir, ff))
        elif f.endswith(".usbimg"):
            ff = f.replace(".usbimg", ".img")
            os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
            outimages.append("%s/%s" %(destdir, ff))

    # Write MANIFEST: one "<md5sum> <filename>" line per file (except itself).
    fd = open(destdir + "/MANIFEST", "w")
    d = os.listdir(destdir)
    for f in d:
        if f == "MANIFEST":
            continue
        if os.path.exists("/usr/bin/md5sum"):
            p = subprocess.Popen(["/usr/bin/md5sum", "-b", "%s/%s" %(destdir, f )],
                             stdout=subprocess.PIPE)
            (md5sum, errorstr) = p.communicate()
            if p.returncode != 0:
                logging.warning("Can't generate md5sum for image %s/%s" %(destdir, f ))
            else:
                md5sum = md5sum.split(" ")[0]
                fd.write(md5sum+" "+f+"\n")

    outimages.append("%s/MANIFEST" % destdir)
    fd.close()

    """ Update the file list. """
    # Drop entries that no longer exist (e.g. files renamed/moved above).
    updated_list = []
    for file in outimages:
        if os.path.exists("%s" % file):
            updated_list.append(file)

    return updated_list
1060
1061 def get_local_distro():
1062     print "Local linux distribution:"
1063     for file in glob.glob("/etc/*-release"):
1064         fd = open(file, "r")
1065         content = fd.read()
1066         fd.close()
1067         print content
1068     if os.path.exists("/etc/issue"):
1069         fd = open("/etc/issue", "r")
1070         content = fd.read()
1071         fd.close()
1072         print content
1073     print "Local Kernel version: " + os.uname()[2]
1074
def check_mic_installation(argv):
    """Raise CreatorError when the running tool is installed in both
    /usr/bin and /usr/local/bin (duplicate mic2 installations)."""
    creator_name = os.path.basename(argv[0])
    install_dirs = ("/usr/local/bin/", "/usr/bin/")
    if all(os.path.exists(d + creator_name) for d in install_dirs):
        raise CreatorError("There are two mic2 installations existing, this will result in some unpredictable errors, the reason is installation path of mic2 binary is different from  installation path of mic2 source on debian-based distros, please remove one of them to ensure it can work normally.")
1080
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    """Fetch the source rpms (.src.rpm) for the given binary packages.

    For each binary package name in `pkgs` the source package is resolved
    via the repo metadata, then copied from the local cache
    (<cachedir>/<repo>/packages/) when present, or downloaded from a
    "-source" repo, into <instroot>/usr/src/SRPMS.

    Returns the list of source package names/paths placed there, or None
    when no "-source" repo exists or any source name cannot be resolved.
    """

    def get_source_repometadata(repometadata):
        # Keep only repos whose name ends with "-source".
        src_repometadata=[]
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        # "foo-1.2-3.4.src.rpm" -> "foo"
        m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        print "No source repo found"
        return None

    src_pkgs = []
    lpkgs_dict = {}   # source package name -> cached .src.rpm path
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot+'/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    # Resolve each binary package to its source package name (deduplicated).
    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            # NOTE(review): a single unresolvable package aborts the whole
            # download and returns None -- confirm that is intended.
            return None
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            # Cache hit: copy the .src.rpm from the local repo cache.
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            # Cache miss: fetch the .src.rpm from the source repo.
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    print '--------------------------------------------------'
    print "%d source packages gotten from cache" %cached_count

    return src_pkgs
1142
def add_optparser(arg):
    """Decorator factory that attaches `arg` as the `optparser` attribute
    of the decorated callable.

    An already-present `optparser` attribute is left untouched.
    """
    def decorate(func):
        if not hasattr(func, "optparser"):
            setattr(func, "optparser", arg)
        return func
    return decorate
1149
1150 def setup_chrootenv(chrootdir, bindmounts = None):##move to mic/utils/misc
1151     global chroot_lockfd, chroot_lock
1152     def get_bind_mounts(chrootdir, bindmounts):
1153         chrootmounts = []
1154         if bindmounts in ("", None):
1155             bindmounts = ""
1156         mounts = bindmounts.split(";")
1157         for mount in mounts:
1158             if mount == "":
1159                 continue
1160             srcdst = mount.split(":")
1161             srcdst[0] = os.path.abspath(os.path.expanduser(srcdst[0]))
1162             if len(srcdst) == 1:
1163                srcdst.append("none")
1164             if not os.path.isdir(srcdst[0]):
1165                 continue
1166             if srcdst[0] in ("/proc", "/proc/sys/fs/binfmt_misc", "/", "/sys", "/dev", "/dev/pts", "/dev/shm", "/var/lib/dbus", "/var/run/dbus", "/var/lock"):
1167                 pwarning("%s will be mounted by default." % srcdst[0])
1168                 continue
1169             if srcdst[1] == "" or srcdst[1] == "none":
1170                 srcdst[1] = None
1171             else:
1172                 srcdst[1] = os.path.abspath(os.path.expanduser(srcdst[1]))
1173                 if os.path.isdir(chrootdir + "/" + srcdst[1]):
1174                     pwarning("%s has existed in %s , skip it." % (srcdst[1], chrootdir))
1175                     continue
1176             chrootmounts.append(fs_related.BindChrootMount(srcdst[0], chrootdir, srcdst[1]))
1177     
1178         """Default bind mounts"""
1179         chrootmounts.append(fs_related.BindChrootMount("/proc", chrootdir, None))
1180         chrootmounts.append(fs_related.BindChrootMount("/proc/sys/fs/binfmt_misc", chrootdir, None))
1181         chrootmounts.append(fs_related.BindChrootMount("/sys", chrootdir, None))
1182         chrootmounts.append(fs_related.BindChrootMount("/dev", chrootdir, None))
1183         chrootmounts.append(fs_related.BindChrootMount("/dev/pts", chrootdir, None))
1184         chrootmounts.append(fs_related.BindChrootMount("/dev/shm", chrootdir, None))
1185         chrootmounts.append(fs_related.BindChrootMount("/var/lib/dbus", chrootdir, None))
1186         chrootmounts.append(fs_related.BindChrootMount("/var/run/dbus", chrootdir, None))
1187         chrootmounts.append(fs_related.BindChrootMount("/var/lock", chrootdir, None))
1188         chrootmounts.append(fs_related.BindChrootMount("/", chrootdir, "/parentroot", "ro"))
1189         for kernel in os.listdir("/lib/modules"):
1190             chrootmounts.append(fs_related.BindChrootMount("/lib/modules/" + kernel, chrootdir, None, "ro"))
1191     
1192         return chrootmounts
1193
1194     def bind_mount(chrootmounts):
1195         for b in chrootmounts:
1196             print "bind_mount: %s -> %s" % (b.src, b.dest)
1197             b.mount()
1198
1199     def setup_resolv(chrootdir):
1200         shutil.copyfile("/etc/resolv.conf", chrootdir + "/etc/resolv.conf")
1201
1202     globalmounts = get_bind_mounts(chrootdir, bindmounts)
1203     bind_mount(globalmounts)
1204     setup_resolv(chrootdir)
1205     mtab = "/etc/mtab"
1206     dstmtab = chrootdir + mtab
1207     if not os.path.islink(dstmtab):
1208         shutil.copyfile(mtab, dstmtab)
1209     chroot_lock = os.path.join(chrootdir, ".chroot.lock")
1210     chroot_lockfd = open(chroot_lock, "w")
1211     return globalmounts    
1212
1213 def cleanup_chrootenv(chrootdir, bindmounts = None, globalmounts = []):
1214     global chroot_lockfd, chroot_lock
1215     def bind_unmount(chrootmounts):
1216         chrootmounts.reverse()
1217         for b in chrootmounts:
1218             print "bind_unmount: %s -> %s" % (b.src, b.dest)
1219             b.unmount()
1220
1221     def cleanup_resolv(chrootdir):
1222         fd = open(chrootdir + "/etc/resolv.conf", "w")
1223         fd.truncate(0)
1224         fd.close()
1225
1226     def kill_processes(chrootdir):
1227         for file in glob.glob("/proc/*/root"):
1228             try:
1229                 if os.readlink(file) == chrootdir:
1230                     pid = int(file.split("/")[2])
1231                     os.kill(pid, 9)
1232             except:
1233                 pass
1234
1235     def cleanup_mountdir(chrootdir, bindmounts):
1236         if bindmounts == "" or bindmounts == None:
1237             return
1238         chrootmounts = []
1239         mounts = bindmounts.split(";")
1240         for mount in mounts:
1241             if mount == "":
1242                 continue
1243             srcdst = mount.split(":")
1244             if len(srcdst) == 1:
1245                srcdst.append("none")
1246             if srcdst[1] == "" or srcdst[1] == "none":
1247                 srcdst[1] = srcdst[0]
1248             srcdst[1] = os.path.abspath(os.path.expanduser(srcdst[1]))
1249             tmpdir = chrootdir + "/" + srcdst[1]
1250             if os.path.isdir(tmpdir):
1251                 if len(os.listdir(tmpdir)) == 0:
1252                     shutil.rmtree(tmpdir, ignore_errors = True)
1253                 else:
1254                     print "Warning: dir %s isn't empty." % tmpdir
1255     
1256     chroot_lockfd.close()
1257     bind_unmount(globalmounts)
1258     if not fs_releate.my_fuser(chroot_lock):
1259         tmpdir = chrootdir + "/parentroot"
1260         if len(os.listdir(tmpdir)) == 0:
1261             shutil.rmtree(tmpdir, ignore_errors = True)
1262         cleanup_resolv(chrootdir)
1263         if os.path.exists(chrootdir + "/etc/mtab"):
1264             os.unlink(chrootdir + "/etc/mtab")
1265         kill_processes(chrootdir)
1266     cleanup_mountdir(chrootdir, bindmounts)
1267
1268 def chroot(chrootdir, bindmounts = None, execute = "/bin/bash"):
1269     def mychroot():
1270         os.chroot(chrootdir)
1271         os.chdir("/")
1272
1273     dev_null = os.open("/dev/null", os.O_WRONLY)
1274     files_to_check = ["/bin/bash", "/sbin/init"]
1275     
1276     architecture_found = False
1277
1278     """ Register statically-linked qemu-arm if it is an ARM fs """
1279     qemu_emulator = None
1280
1281     for ftc in files_to_check:
1282         ftc = "%s/%s" % (chrootdir,ftc)
1283         
1284         # Return code of 'file' is "almost always" 0 based on some man pages
1285         # so we need to check the file existance first.
1286         if not os.path.exists(ftc):
1287             continue
1288
1289         filecmd = find_binary_path("file")
1290         initp1 = subprocess.Popen([filecmd, ftc], stdout=subprocess.PIPE, stderr=dev_null)
1291         fileOutput = initp1.communicate()[0].strip().split("\n")
1292         
1293         for i in range(len(fileOutput)):
1294             if fileOutput[i].find("ARM") > 0:
1295                 qemu_emulator = setup_qemu_emulator(chrootdir, "arm")
1296                 architecture_found = True
1297                 break
1298             if fileOutput[i].find("Intel") > 0:
1299                 architecture_found = True
1300                 break
1301                 
1302         if architecture_found:
1303             break
1304                 
1305     os.close(dev_null)
1306     if not architecture_found:
1307         raise errors.CreatorError("Failed to get architecture from any of the following files %s from chroot." % files_to_check)
1308
1309     try:
1310         print "Launching shell. Exit to continue."
1311         print "----------------------------------"
1312         globalmounts = setup_chrootenv(chrootdir, bindmounts)
1313         args = shlex.split(execute)
1314         subprocess.call(args, preexec_fn = mychroot)
1315     except OSError, (err, msg):
1316         raise errors.CreatorError("Failed to chroot: %s" % msg)
1317     finally:
1318         cleanup_chrootenv(chrootdir, bindmounts, globalmounts)
1319         if qemu_emulator:
1320             os.unlink(chrootdir + qemu_emulator)        
1321