# initial import code into git
# [platform/upstream/mic.git] / micng / utils / misc.py
1 #
2 # misc.py : miscellaneous utilities
3 #
4 # Copyright 2010, Intel Inc.
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; version 2 of the License.
9 #
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
13 # GNU Library General Public License for more details.
14 #
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
18
19
20 import os
21 import sys
22 import subprocess
23 import logging
24 import tempfile
25 import re
26 import shutil
27 import glob
28 import xml.dom.minidom
29 import hashlib
30 import urlparse
31 import locale
32 import codecs
33
34 try:
35     import sqlite3 as sqlite
36 except ImportError:
37     import sqlite
38 import _sqlitecache
39
40 try:
41     from xml.etree import cElementTree
42 except ImportError:
43     import cElementTree
44 xmlparse = cElementTree.parse
45
46 from errors import *
47 from fs_related import *
48
49
def setlocale():
    """Initialise the process locale and wrap sys.stdout so output is
    encoded with the user's preferred encoding.

    Falls back to the "C" locale when the environment's locale settings
    are invalid; undecodable output characters are replaced.
    """
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error:
        # Bad/unsupported locale in the environment: force the portable one.
        os.environ['LC_ALL'] = 'C'
        locale.setlocale(locale.LC_ALL, 'C')
    encoding = locale.getpreferredencoding()
    sys.stdout = codecs.getwriter(encoding)(sys.stdout)
    sys.stdout.errors = 'replace'
58
def get_extension_name(path):
    """Return the final filename extension of *path* (the word
    characters after the last dot), or None when there is none."""
    found = re.search(r"(?<=\.)\w+$", path)
    return found.group(0) if found else None
65
def get_image_type(path):
    """Guess the image type of *path*.

    Returns "fs" for a MeeGo root directory, one of "raw", "vmdk",
    "vdi", "livecd", "liveusb", "ext3fsimg" for image files, or None
    when the type cannot be determined.
    """
    maptab = {"raw":"raw", "vmdk":"vmdk", "vdi":"vdi", "iso":"livecd", "usbimg":"liveusb"}

    # A directory counts as an (unpacked) fs image only when it looks
    # like a MeeGo root.
    if os.path.isdir(path):
        return "fs" if ismeego(path) else None

    # Fast path: trust a known filename extension.
    extension = get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    # Look for the VirtualBox signature in the first KiB of the file.
    imgfile = open(path, "rb")
    file_header = imgfile.read(1024)
    imgfile.close()
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header.startswith(vdi_flag):
        return maptab["vdi"]

    # Fall back to file(1) and pattern-match its description.
    dev_null = os.open("/dev/null", os.O_WRONLY)
    filecmd = find_binary_path("file")
    fileproc = subprocess.Popen([filecmd, path],
                                stdout=subprocess.PIPE, stderr=dev_null)
    output = fileproc.communicate()[0]
    os.close(dev_null)

    # Ordered: the more specific patterns must be tried first (an
    # "active" x86 boot sector would also match the plain raw pattern).
    checks = (
        (r".*ISO 9660 CD-ROM filesystem.*(bootable).*", maptab["iso"]),
        (r".*x86 boot sector.*active.*", maptab["usbimg"]),
        (r".*x86 boot sector.*", maptab["raw"]),
        (r".*VMware. disk image.*", maptab["vmdk"]),
        (r".*Linux.*ext3 filesystem data.*", "ext3fsimg"),
    )
    for pattern, imgtype in checks:
        if re.match(pattern, output):
            return imgtype
    return None
106
def get_file_size(file):
    """Return the size of *file* in MB (1M du blocks).

    Runs du twice -- once with -b for apparent size, once without for
    actual disk usage -- and returns the larger result.

    Raises CreatorError when du fails.
    """
    du = find_binary_path("du")
    dev_null = os.open("/dev/null", os.O_WRONLY)
    try:
        sizes = []
        # First pass uses "-b" (apparent size); second reports blocks used.
        for extra_args in (["-b"], []):
            duProc = subprocess.Popen([du, "-s"] + extra_args + ["-B", "1M", file],
                                      stdout=subprocess.PIPE, stderr=dev_null)
            duOutput = duProc.communicate()[0]
            if duProc.returncode:
                raise CreatorError("Failed to run %s" % du)
            sizes.append(int(duOutput.split()[0]))
    finally:
        # BUGFIX: dev_null used to leak when du failed (raise happened
        # before os.close()).
        os.close(dev_null)
    return max(sizes)
130
def get_filesystem_avail(fs):
    """Return the number of bytes available to an unprivileged user on
    the filesystem containing *fs*."""
    stat = os.statvfs(fs)
    return stat.f_bsize * stat.f_bavail
134
def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    """Convert a disk image to raw format.

    srcimg/srcfmt -- source image path and its format ("vmdk" or "vdi")
    dstimg/dstfmt -- destination path; only "raw" is supported

    Raises CreatorError on an unsupported format or conversion failure.
    """
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    logging.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt,  dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        # BUGFIX: error message had a typo ("soure").
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = subprocess.call(argv)
    if rc != 0:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)
    logging.debug("convert successful")
154
def myxcopytree(src, dst):
    """Copy every entry of directory *src* into *dst* with "cp -af",
    then remove runtime pseudo-files that must not survive the copy."""
    dev_null = os.open("/dev/null", os.O_WRONLY)
    copycmd = find_binary_path("cp")
    for entry in os.listdir(src):
        subprocess.call([copycmd, "-af", src + "/" + entry, dst],
                        stdout=dev_null, stderr=dev_null)
    os.close(dev_null)
    # These are live kernel/runtime artifacts, not real payload files.
    for exclude in ("dev/fd", "dev/stdin", "dev/stdout", "dev/stderr", "etc/mtab"):
        target = dst + "/" + exclude
        if os.path.exists(target):
            os.unlink(target)
167
def uncompress_squashfs(squashfsimg, outdir):
    """Extract the filesystem contained in *squashfsimg* into *outdir*.

    Raises SquashfsError when unsquashfs exits non-zero.
    """
    unsquashfs = find_binary_path("unsquashfs")
    if subprocess.call([unsquashfs, "-d", outdir, squashfsimg]) != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)
175
def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """Create a fresh temporary directory under *dir* and return its
    path.  *dir* is first passed to makedirs() (from fs_related) so the
    parent exists.  Note: the "dir" parameter name shadows the builtin
    but is kept for keyword-argument compatibility."""
    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)
179
def ismeego(rootdir):
    """Heuristically decide whether *rootdir* holds an installed
    MeeGo/Moblin root filesystem (release file, inittab, rc.sysinit
    and at least one kernel image)."""
    has_release = (os.path.exists(rootdir + "/etc/moblin-release")
                   or os.path.exists(rootdir + "/etc/meego-release"))
    return bool(has_release
                and os.path.exists(rootdir + "/etc/inittab")
                and os.path.exists(rootdir + "/etc/rc.sysinit")
                and glob.glob(rootdir + "/boot/vmlinuz-*"))
190
191
def is_meego_bootstrap(rootdir):
    """Return True when *rootdir* looks like a MeeGo bootstrap root:
    a release file plus python and mic-image-creator binaries."""
    release_found = (os.path.exists(rootdir + "/etc/moblin-release")
                     or os.path.exists(rootdir + "/etc/meego-release"))
    if not release_found:
        return False
    if not os.path.exists(rootdir + "/usr/bin/python"):
        return False
    return os.path.exists(rootdir + "/usr/bin/mic-image-creator")
201
202
# Proxy configuration shared by the helpers below:
#   _my_proxies      -- scheme -> proxy URL mapping
#   _my_noproxy      -- raw no_proxy string ("host,.domain,ip/mask,...")
#   _my_noproxy_list -- parsed no_proxy entries built by set_noproxy_list()
_my_proxies = {}
_my_noproxy = None
_my_noproxy_list = []
206
def set_proxy_environ():
    """Export the previously collected proxy settings back into
    os.environ as <scheme>_proxy (and no_proxy) variables."""
    global _my_noproxy, _my_proxies
    if not _my_proxies:
        return
    for scheme in _my_proxies.keys():
        os.environ[scheme + "_proxy"] = _my_proxies[scheme]
    if _my_noproxy:
        os.environ["no_proxy"] = _my_noproxy
216
def unset_proxy_environ():
    """Remove every proxy-related variable, lower- and upper-case, from
    os.environ so urllib2 does not pick them up implicitly."""
    for name in ("http_proxy", "https_proxy", "ftp_proxy",
                 "all_proxy", "no_proxy"):
        # dict.has_key() was used before; "in" is equivalent and also
        # works on Python 3.
        if name in os.environ:
            del os.environ[name]
        upper = name.upper()
        if upper in os.environ:
            del os.environ[upper]
238
def _set_proxies(proxy = None, no_proxy = None):
    """Populate the module-level proxy tables.

    Fills _my_proxies (scheme -> proxy URL) and _my_noproxy from the
    given arguments, or from *_proxy environment variables when
    neither argument is supplied.
    """
    global _my_noproxy, _my_proxies
    _my_proxies = {}
    _my_noproxy = None

    if proxy or no_proxy:
        candidates = []
        if proxy:
            candidates.append(("http_proxy", proxy))
        if no_proxy:
            candidates.append(("no_proxy", no_proxy))
    else:
        # Fall back to the environment, then scrub the variables since
        # urllib2 can't handle them correctly.
        candidates = os.environ.items()
        unset_proxy_environ()

    for name, value in candidates:
        name = name.lower()
        if not value or not name.endswith('_proxy'):
            continue
        if name.startswith("no"):
            _my_noproxy = value
        else:
            _my_proxies[name[:-6]] = value
264
def ip_to_int(ip):
    """Convert a dotted-quad IPv4 string ("1.2.3.4") to its 32-bit
    integer representation."""
    total = 0
    shift = 24
    for octet in ip.split("."):
        total |= int(octet) << shift
        shift -= 8
    return total
272
def int_to_ip(val):
    """Convert a 32-bit integer to its dotted-quad IPv4 string."""
    octets = []
    for shift in (24, 16, 8, 0):
        octets.append(str((val >> shift) & 0xff))
    return ".".join(octets)
282
def isip(host):
    """Crude test for a numeric (IPv4-style) host: true when *host*
    consists of digits and dots only."""
    return host.replace(".", "").isdigit()
287
def set_noproxy_list():
    """Parse the module-level _my_noproxy string into _my_noproxy_list.

    Each comma-separated entry becomes a dict with a "match" mode:
      0 -- plain hostname, compared for equality
      1 -- ".domain" entry, matched at the tail of the host name
      2 -- "IP/MASK" network, matched against the host address

    NOTE(review): entries containing "/" at an index <= 3 that do not
    start with "." are silently dropped -- confirm that is intended.
    """
    global _my_noproxy, _my_noproxy_list
    _my_noproxy_list = []
    if not _my_noproxy:
        return
    for item in _my_noproxy.split(","):
        item = item.strip()
        if not item:
            continue
        if item[0] != '.' and item.find("/") == -1:
            """ Need to match it """
            _my_noproxy_list.append({"match":0,"needle":item})
        elif item[0] == '.':
            """ Need to match at tail """
            _my_noproxy_list.append({"match":1,"needle":item})
        elif item.find("/") > 3:
            """ IP/MASK, need to match at head """
            needle = item[0:item.find("/")].strip()
            ip = ip_to_int(needle)
            netmask = 0
            mask = item[item.find("/")+1:].strip()

            if mask.isdigit():
                # Prefix length form, e.g. "/24" -> 0xffffff00.
                netmask = int(mask)
                netmask = ~((1<<(32-netmask)) - 1)
                ip &= netmask
            else:
                # Dotted-quad netmask form, e.g. "/255.255.255.0".
                shift=24
                netmask=0
                for dec in mask.split("."):
                    netmask |= int(dec) << shift
                    shift -= 8
                ip &= netmask
            _my_noproxy_list.append({"match":2,"needle":ip,"netmask":netmask})
322
def isnoproxy(url):
    """Return True when *url*'s host matches an entry in the parsed
    no-proxy list (built by set_noproxy_list)."""
    (scheme, host, path, parm, query, frag) = urlparse.urlparse(url)
    # Strip credentials and port from the netloc before matching.
    if '@' in host:
        user_pass, host = host.split('@', 1)
    if ':' in host:
        host, port = host.split(':', 1)
    hostisip = isip(host)
    for item in _my_noproxy_list:
        # Name-based entries can never match a numeric host.
        if hostisip and item["match"] <= 1:
            continue
        if item["match"] == 2 and hostisip:
            if (ip_to_int(host) & item["netmask"]) == item["needle"]:
                return True
        if item["match"] == 0:
            if host == item["needle"]:
                return True
        if item["match"] == 1:
            # BUGFIX: was "host.rfind(needle) > 0", which also matched
            # the ".domain" needle in the *middle* of a host name; a
            # tail match must be a suffix match.
            if host.endswith(item["needle"]):
                return True
    return False
343
def set_proxies(proxy = None, no_proxy = None):
    """Record proxy settings (from the arguments, or from the
    environment when both are None) and rebuild the parsed no-proxy
    list used by isnoproxy()."""
    _set_proxies(proxy, no_proxy)
    set_noproxy_list()
347
def get_proxy(url):
    """Return the proxy URL to use for *url*, or None for file URLs
    and no-proxy hosts.  Falls back to the "http" proxy when no proxy
    is registered for the URL's scheme."""
    if url[0:4] == "file" or isnoproxy(url):
        return None
    # Renamed from "type" (shadowed the builtin); dict.has_key() is
    # Python-2-only, "in" is equivalent and portable.
    scheme = url[0:url.index(":")]
    if scheme in _my_proxies:
        return _my_proxies[scheme]
    if "http" in _my_proxies:
        return _my_proxies["http"]
    return None
360
def remap_repostr(repostr, siteconf):
    """Rewrite the baseurl inside *repostr* when the site config has an
    enabled section whose "equalto" option names this repo (by name or
    baseurl); otherwise return *repostr* unchanged."""
    name = None
    baseurl = None
    for item in repostr.split(","):
        fields = item.split(":")
        if fields[0] == "name":
            name = fields[1]
        if fields[0] == "baseurl":
            baseurl = item[8:]
    if not baseurl:
        # No explicit baseurl item: treat the whole string as the URL.
        baseurl = repostr

    for section in siteconf._sections:
        if section == "main":
            continue
        if not siteconf.has_option(section, "enabled") or siteconf.get(section, "enabled") == "0":
            continue
        if not siteconf.has_option(section, "equalto"):
            continue
        equalto = siteconf.get(section, "equalto")
        if (name and equalto == name) or (baseurl and equalto == baseurl):
            return repostr.replace(baseurl, siteconf.get(section, "baseurl"))

    return repostr
386
387
def get_temp_reponame(baseurl):
    """Derive a stable temporary repo name from *baseurl*: its md5 hex
    digest."""
    # Encode explicitly: hashlib requires bytes on Python 3, and repo
    # URLs are expected to be ASCII/UTF-8 text.
    try:
        data = baseurl.encode("utf-8")
    except (AttributeError, UnicodeDecodeError):
        data = baseurl
    return hashlib.md5(data).hexdigest()
392
def get_repostr(repo, siteconf = None):
    """Convert a "key:value,..." repo description into a kickstart-style
    "repo --baseurl=... --name=..." option line.

    repo     -- either a bare URL / single "baseurl:URL" item, or a
                comma-separated list of "key:value" items (allowed keys
                listed in *keys* below).
    siteconf -- optional site configuration; when given, the baseurl is
                remapped first (see remap_repostr).

    Raises ValueError for a malformed repo string or URL.
    """
    if siteconf:
        repo = remap_repostr(repo, siteconf)
    keys = ("baseurl", "mirrorlist", "name", "cost", "includepkgs", "excludepkgs", "proxy", "save", "proxyuser", "proxypasswd", "debuginfo", "source", "gpgkey")
    repostr = "repo"
    items = repo.split(",")
    if len(items) == 1:
        # Single-item form: a bare URL, "baseurl:URL", or "scheme://...".
        subitems = items[0].split(":")
        if len(subitems) == 1:
            url = subitems[0]
            repostr += " --baseurl=%s" % url
        elif subitems[0] == "baseurl":
            # len("baseurl:") == 8
            url = items[0][8:]
            repostr += " --baseurl=%s" % url
        elif subitems[0] in ("http", "ftp", "https", "ftps", "file"):
            url = items[0]
            repostr += " --baseurl=%s" % url
        else:
            raise ValueError("Invalid repo string")
        # The URL must be "scheme://host/...": known scheme and at least
        # one "/" after the host part.
        if url.find("://") == -1 \
           or url[0:url.index("://")] not in ("http", "ftp", "https", "ftps", "file") \
           or url.find("/", url.index("://")+3) == -1:
            raise ValueError("Invalid repo string")
    else:
        # Multi-item form: must carry a baseurl or mirrorlist.
        if repo.find("baseurl:") == -1 and repo.find("mirrorlist:") == -1:
            raise ValueError("Invalid repo string")
        url = None
        for item in items:
            if not item:
                continue
            subitems = item.split(":")
            if subitems[0] in keys:
                if subitems[0] in ("baseurl", "mirrorlist"):
                    url = item[len(subitems[0])+1:]
                # Flag-style options carry no value.
                if subitems[0] in ("save", "debuginfo", "source"):
                    repostr += " --%s" % subitems[0]
                elif subitems[0] in ("includepkgs", "excludepkgs"):
                    # Package lists use ";" internally; convert back to ",".
                    repostr += " --%s=%s" % (subitems[0], item[len(subitems[0])+1:].replace(";", ","))
                else:
                    repostr += " --%s=%s" % (subitems[0], item[len(subitems[0])+1:])
            else:
                raise ValueError("Invalid repo string")
    # Validate the URL and attach a proxy option when one applies.
    if url.find("://") != -1 \
       and url[0:url.index("://")] in ("http", "ftp", "https", "ftps", "file") \
       and url.find("/", url.index("://")+3) != -1:
        if repostr.find("--proxy=") == -1:
            proxy = get_proxy(url)
            if proxy:
                repostr += " --proxy=%s" % proxy
    else:
        raise ValueError("Invalid repo string")

    # Unnamed repos get a deterministic name derived from the URL.
    if repostr.find("--name=") == -1:
        repostr += " --name=%s" % get_temp_reponame(url)

    return repostr
449
# Site configuration search path: the system-wide file is read first,
# then the per-user file may override it (see read_siteconf).
DEFAULT_SITECONF_GLOBAL="/etc/mic2/mic2.conf"
DEFAULT_SITECONF_USER="~/.mic2.conf"
452
def read_siteconf(siteconf = None):
    """Load site configuration.

    With an explicit *siteconf* path, read just that file; otherwise
    read the global config and then the user's config on top of it.
    Returns the parser, or None when no sections were found.
    """
    from ConfigParser import SafeConfigParser

    parser = SafeConfigParser()
    if siteconf:
        parser.read(siteconf)
    else:
        # User settings are read last so they override the global file.
        for candidate in (DEFAULT_SITECONF_GLOBAL,
                          os.path.expanduser(DEFAULT_SITECONF_USER)):
            if os.path.isfile(candidate):
                parser.read(candidate)

    return parser if parser.sections() else None
472
473 def output_siteconf(siteconf):
474     output = ""
475     if not siteconf:
476         return output
477
478     for section in siteconf.sections():
479         output += "[%s]\n" % section
480         for option in siteconf.options(section):
481             output += "%s=%s\n" % (option, siteconf.get(section, option))
482         output += "\n\n"
483
484     print output
485     return output
486
def get_repostrs_from_ks(ks):
    """Build "key:value,..." repo strings from the kickstart handler's
    repo list (the format consumed by get_repostr)."""
    kickstart_repos = []
    for repodata in ks.handler.repo.repoList:
        repostr = ""
        if hasattr(repodata, "name") and repodata.name:
            repostr += ",name:" + repodata.name
        if hasattr(repodata, "baseurl") and repodata.baseurl:
            repostr += ",baseurl:" + repodata.baseurl
        if hasattr(repodata, "mirrorlist") and repodata.mirrorlist:
            repostr += ",mirrorlist:" + repodata.mirrorlist
        # Package lists use ";" internally so they survive the
        # comma-separated outer format.
        if hasattr(repodata, "includepkgs") and repodata.includepkgs:
            repostr += ",includepkgs:" + ";".join(repodata.includepkgs)
        if hasattr(repodata, "excludepkgs") and repodata.excludepkgs:
            repostr += ",excludepkgs:" + ";".join(repodata.excludepkgs)
        if hasattr(repodata, "cost") and repodata.cost:
            repostr += ",cost:%d" % repodata.cost
        if hasattr(repodata, "save") and repodata.save:
            repostr += ",save:"
        if hasattr(repodata, "proxy") and repodata.proxy:
            repostr += ",proxy:" + repodata.proxy
        # NOTE(review): checks attribute "proxyuser"/"proxypasswd" but
        # reads proxy_username/proxy_password -- confirm against the
        # pykickstart RepoData attribute names.
        if hasattr(repodata, "proxyuser") and repodata.proxy_username:
            repostr += ",proxyuser:" + repodata.proxy_username
        if hasattr(repodata, "proxypasswd") and repodata.proxy_password:
            repostr += ",proxypasswd:" + repodata.proxy_password
        if repostr.find("name:") == -1:
            # BUGFIX: this used "repostr =", which discarded everything
            # collected above whenever the repo had no name.
            repostr += ",name:%s" % get_temp_reponame(repodata.baseurl)
        if hasattr(repodata, "debuginfo") and repodata.debuginfo:
            repostr += ",debuginfo:"
        if hasattr(repodata, "source") and repodata.source:
            repostr += ",source:"
        if hasattr(repodata, "gpgkey") and repodata.gpgkey:
            repostr += ",gpgkey:" + repodata.gpgkey
        kickstart_repos.append(repostr[1:])
    return kickstart_repos
521
def get_repostrs_from_siteconf(siteconf):
    """Build "key:value,..." repo strings for every enabled repo
    section of the site configuration that is not an "equalto" remap
    entry (those only rewrite other repos, see remap_repostr)."""
    site_repos = []
    if not siteconf:
        return site_repos

    for section in siteconf._sections:
        if section != "main":
            repostr = ""
            if siteconf.has_option(section, "enabled") \
               and siteconf.get(section, "enabled") == "1" \
               and (not siteconf.has_option(section, "equalto") or not siteconf.get(section, "equalto")):
                if siteconf.has_option(section, "name") and siteconf.get(section, "name"):
                    repostr += ",name:%s" % siteconf.get(section, "name")
                if siteconf.has_option(section, "baseurl") and siteconf.get(section, "baseurl"):
                    repostr += ",baseurl:%s" % siteconf.get(section, "baseurl")
                if siteconf.has_option(section, "mirrorlist") and siteconf.get(section, "mirrorlist"):
                    repostr += ",mirrorlist:%s" % siteconf.get(section, "mirrorlist")
                # Package lists switch to ";" so they survive the
                # comma-separated outer format.
                if siteconf.has_option(section, "includepkgs") and siteconf.get(section, "includepkgs"):
                    repostr += ",includepkgs:%s" % siteconf.get(section, "includepkgs").replace(",", ";")
                if siteconf.has_option(section, "excludepkgs") and siteconf.get(section, "excludepkgs"):
                    repostr += ",excludepkgs:%s" % siteconf.get(section, "excludepkgs").replace(",", ";")
                if siteconf.has_option(section, "cost") and siteconf.get(section, "cost"):
                    repostr += ",cost:%s" % siteconf.get(section, "cost")
                if siteconf.has_option(section, "save") and siteconf.get(section, "save"):
                    repostr += ",save:"
                if siteconf.has_option(section, "proxy") and siteconf.get(section, "proxy"):
                    repostr += ",proxy:%s" % siteconf.get(section, "proxy")
                if siteconf.has_option(section, "proxy_username") and siteconf.get(section, "proxy_username"):
                    repostr += ",proxyuser:%s" % siteconf.get(section, "proxy_username")
                if siteconf.has_option(section, "proxy_password") and siteconf.get(section, "proxy_password"):
                    repostr += ",proxypasswd:%s" % siteconf.get(section, "proxy_password")
            if repostr != "":
                if repostr.find("name:") == -1:
                    # BUGFIX: this used to *overwrite* repostr (losing all
                    # options above) and called get_temp_reponame() with
                    # no argument, which raised TypeError.  Seed the name
                    # from the baseurl, like get_repostrs_from_ks does.
                    if siteconf.has_option(section, "baseurl") and siteconf.get(section, "baseurl"):
                        seed = siteconf.get(section, "baseurl")
                    else:
                        seed = section
                    repostr += ",name:%s" % get_temp_reponame(seed)
                site_repos.append(repostr[1:])
    return site_repos
558
def get_uncompressed_data_from_url(url, filename, proxies):
    """Download *url* to *filename* and decompress it in place when it
    is gzip or bzip2 compressed.  Returns the (possibly renamed) local
    path."""
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        gunzip = find_binary_path('gunzip')
        subprocess.call([gunzip, "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        bunzip2 = find_binary_path('bunzip2')
        subprocess.call([bunzip2, "-f", filename])
    if suffix:
        # BUGFIX: str.replace() stripped the *first* occurrence of the
        # suffix anywhere in the path; only the trailing extension must
        # be removed (gunzip/bunzip2 drop it from the output name).
        filename = filename[:-len(suffix)]
    return filename
573
def get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename):
    """Fetch one metadata file from a repo into the per-repo cache
    directory, decompressing when necessary; return the local path."""
    url = str(baseurl + "/" + filename)
    local = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    return get_uncompressed_data_from_url(url, local, proxies)
578
579 def get_metadata_from_repos(repostrs, cachedir):
580     if not cachedir:
581         CreatorError("No cache dir defined.")
582
583     my_repo_metadata = []
584     for repostr in repostrs:
585         reponame = None
586         baseurl = None
587         proxy = None
588         items = repostr.split(",")
589         for item in items:
590             subitems = item.split(":")
591             if subitems[0] == "name":
592                 reponame = subitems[1]
593             if subitems[0] == "baseurl":
594                 baseurl = item[8:]
595             if subitems[0] == "proxy":
596                 proxy = item[6:]
597             if subitems[0] in ("http", "https", "ftp", "ftps", "file"):
598                 baseurl = item
599         if not proxy:
600             proxy = get_proxy(baseurl)
601         proxies = None
602         if proxy:
603            proxies = {str(proxy.split(":")[0]):str(proxy)}
604         makedirs(cachedir + "/" + reponame)
605         url = str(baseurl + "/repodata/repomd.xml")
606         filename = str("%s/%s/repomd.xml" % (cachedir, reponame))
607         repomd = myurlgrab(url, filename, proxies)
608         try:
609             root = xmlparse(repomd)
610         except SyntaxError:
611             raise CreatorError("repomd.xml syntax error.")
612
613         ns = root.getroot().tag
614         ns = ns[0:ns.rindex("}")+1]
615
616         patterns = None
617         for elm in root.getiterator("%sdata" % ns):
618             if elm.attrib["type"] == "patterns":
619                 patterns = elm.find("%slocation" % ns).attrib['href']
620                 break
621
622         comps = None
623         for elm in root.getiterator("%sdata" % ns):
624             if elm.attrib["type"] == "group_gz":
625                 comps = elm.find("%slocation" % ns).attrib['href']
626                 break
627         if not comps:
628             for elm in root.getiterator("%sdata" % ns):
629                 if elm.attrib["type"] == "group":
630                     comps = elm.find("%slocation" % ns).attrib['href']
631                     break
632
633         primary_type = None
634         for elm in root.getiterator("%sdata" % ns):
635             if elm.attrib["type"] == "primary_db":
636                 primary_type=".sqlite"
637                 break
638
639         if not primary_type:
640             for elm in root.getiterator("%sdata" % ns):
641                 if elm.attrib["type"] == "primary":
642                     primary_type=".xml"
643                     break
644
645         if not primary_type:
646             continue
647
648         primary = elm.find("%slocation" % ns).attrib['href']
649         primary = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, primary)
650
651         if patterns:
652             patterns = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, patterns)
653
654         if comps:
655             comps = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, comps)
656
657         """ Get repo key """
658         try:
659             repokey = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, "repodata/repomd.xml.key")
660         except CreatorError:
661             repokey = None
662             print "Warning: can't get %s/%s" % (baseurl, "repodata/repomd.xml.key")
663
664         my_repo_metadata.append({"name":reponame, "baseurl":baseurl, "repomd":repomd, "primary":primary, "cachedir":cachedir, "proxies":proxies, "patterns":patterns, "comps":comps, "repokey":repokey})
665     return my_repo_metadata
666
def get_arch(repometadata):
    """Collect the distinct binary architectures (excluding noarch and
    src) advertised by the repos' primary metadata, in first-seen
    order."""
    archlist = []
    for repo in repometadata:
        primary = repo["primary"]
        if primary.endswith(".xml"):
            root = xmlparse(primary)
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                arch = elm.find("%sarch" % ns).text
                if arch not in ("noarch", "src") and arch not in archlist:
                    archlist.append(arch)
        elif primary.endswith(".sqlite"):
            con = sqlite.connect(primary)
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])
            con.close()
    return archlist
687
688
def get_package(pkg, repometadata, arch = None):
    """Download the binary rpm named *pkg* from one of the repos and
    return its local cached path, or None when not found.

    NOTE(review): versions are compared as plain "%s-%s" strings, which
    is not rpm version ordering.  In the sqlite branches `ver` is never
    updated and the loop breaks unconditionally after the first row, so
    effectively the first matching row wins -- confirm this is the
    intended "pick a version" behavior.
    """
    ver = ""
    target_repo = None
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            # Namespaced tags look like "{...}package"; keep the prefix.
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if not arch:
                for row in con.execute("select version, release,location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    # NOTE(review): breaks after the first row (see above).
                    break
            else:
                for row in con.execute("select version, release,location_href from packages where name = \"%s\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        # Fetch the rpm into the repo's package cache and return its path.
        makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
        url = str(target_repo["baseurl"] + "/" + pkgpath)
        filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        pkg = myurlgrab(url, filename, target_repo["proxies"])
        return pkg
    else:
        return None
733
def get_source_name(pkg, repometadata):
    """Given a binary rpm filename *pkg*, look up its source package
    name via the repos' primary metadata.  Returns None when the
    package or its sourcerpm cannot be determined.

    NOTE(review): versions are compared lexicographically, and the
    sqlite branch breaks unconditionally after the first row -- see the
    matching notes on get_package().
    """

    def get_bin_name(pkg):
        # "name-version-release.arch.rpm" -> "name"
        m = re.match("(.*)-(.*)-(.*)\.(.*)\.rpm", pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        # "name-version-release.src.rpm" -> "name"
        m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            # Namespaced tags look like "{...}package"; keep the prefix.
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            # NOTE(review): an Element with no children is
                            # falsy; "if fmt is not None" may be intended.
                            if fmt:
                                # The <format> children carry their own
                                # namespace; derive it from the first child.
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None
788
def get_release_no(repometadata, distro="meego"):
    """Return the release number of `distro` as found in the repos.

    Locates the <distro>-release package in `repometadata`, unpacks it
    with "rpm2cpio | cpio -idv" into a scratch directory, and returns
    the third space-separated field of /etc/<distro>-release.
    Returns "UNKNOWN" when the release package cannot be found.
    """
    cpio = find_binary_path("cpio")
    rpm2cpio = find_binary_path("rpm2cpio")

    release_pkg = get_package("%s-release" % distro, repometadata)
    if not release_pkg:
        return "UNKNOWN"

    workdir = mkdtemp()
    prevdir = os.getcwd()
    os.chdir(workdir)

    # rpm2cpio <pkg> | cpio -idv : unpack the rpm payload under workdir
    producer = subprocess.Popen([rpm2cpio, release_pkg], stdout = subprocess.PIPE)
    consumer = subprocess.Popen([cpio, "-idv"], stdin = producer.stdout,
                                stdout = subprocess.PIPE, stderr = subprocess.PIPE)
    consumer.communicate()

    relfile = open("%s/etc/%s-release" % (workdir, distro), "r")
    content = relfile.read()
    relfile.close()

    os.chdir(prevdir)
    shutil.rmtree(workdir, ignore_errors = True)
    return content.split(" ")[2]
808
def get_kickstarts_from_repos(repometadata):
    """Download the kickstart files advertised by a repo's image-config.

    For each repo dict, its repomd.xml is parsed looking for a
    <data type="image-config"> entry; the referenced image-config.xml is
    fetched, and each of its <config> elements names one kickstart file
    which is downloaded into the repo's cache directory.

    Returns a list of {"filename": local_path, "description": text} dicts.

    NOTE(review): the trailing `return kickstarts` sits inside the repo
    loop, so only the first repo carrying an image-config entry is ever
    processed — confirm whether that is intentional.
    """
    kickstarts = []
    for repo in repometadata:
        try:
            root = xmlparse(repo["repomd"])
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        # Extract the "{namespace-uri}" prefix from the root tag; it has
        # to be prepended to every tag name searched for below.
        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        # Look for the image-config data entry; note `elm` keeps the last
        # element visited when none matches.
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "image-config":
                break

        # No image-config entry in this repo: try the next one.
        if elm.attrib["type"] != "image-config":
            continue

        # Remote URL and local cache path of the image-config file.
        # NOTE(review): `suffix` is not defined in this function —
        # presumably a module-level name (compression suffix?); verify.
        location = elm.find("%slocation" % ns)
        image_config = str(repo["baseurl"] + "/" + location.attrib["href"])
        filename = str("%s/%s/image-config.xml%s" % (repo["cachedir"], repo["name"], suffix))

        image_config = get_uncompressed_data_from_url(image_config,filename,repo["proxies"])

        try:
            root = xmlparse(image_config)
        except SyntaxError:
            raise CreatorError("image-config.xml syntax error.")

        # Each <config> element points at one kickstart file to download.
        for elm in root.getiterator("config"):
            path = elm.find("path").text
            # normalize "images-config" to "image-config" in stored paths
            path = path.replace("images-config", "image-config")
            description = elm.find("description").text
            makedirs(os.path.dirname("%s/%s/%s" % (repo["cachedir"], repo["name"], path)))
            # Relative paths are resolved against the repo base URL.
            url = path
            if "http" not in path:
                url = str(repo["baseurl"] + "/" + path)
            filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], path))
            path = myurlgrab(url, filename, repo["proxies"])
            kickstarts.append({"filename":path,"description":description})
        return kickstarts
850
851 def select_ks(ksfiles):
852     print "Available kickstart files:"
853     i = 0
854     for ks in ksfiles:
855         i += 1
856         print "\t%d. %s (%s)" % (i, ks["description"], os.path.basename(ks["filename"]))
857     while True:
858         choice = raw_input("Please input your choice and press ENTER. [1..%d] ? " % i)
859         if choice.lower() == "q":
860             sys.exit(1)
861         if choice.isdigit():
862             choice = int(choice)
863             if choice >= 1 and choice <= i:
864                 break
865
866     return ksfiles[choice-1]["filename"]
867
868
def get_pkglist_in_patterns(group, patterns):
    """Return the package names required by pattern `group`.

    `patterns` is the path to a patterns XML file. The pattern whose
    <name> or <summary> text equals `group` is located, and the "name"
    attribute of every entry under its *requires child is collected
    without duplicates, preserving document order. Returns an empty
    list when the group or its requires section is absent.

    Raises SyntaxError when the patterns file cannot be parsed.
    """
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    # Locate the pattern element matching the requested group.
    matched = None
    for pattern in list(root.getroot()):
        # Namespace prefix ("{uri}") taken from the element's own tag.
        nsprefix = pattern.tag
        nsprefix = nsprefix[0:nsprefix.rindex("}")+1]
        if pattern.find("%sname" % nsprefix).text == group \
           or pattern.find("%ssummary" % nsprefix).text == group:
            matched = pattern
            break

    if matched is None:
        return []

    # Locate its requires section (tag name carries the namespace).
    reqsec = None
    for child in list(matched):
        if child.tag.endswith("requires"):
            reqsec = child
            break

    if reqsec is None:
        return []

    # Collect unique package names, preserving order.
    pkglist = []
    for entry in list(reqsec):
        if entry.attrib["name"] not in pkglist:
            pkglist.append(entry.attrib["name"])

    return pkglist
904
def get_pkglist_in_comps(group, comps):
    """Return the package names in comps group `group`.

    `comps` is the path to a comps XML file. The <group> whose <id> or
    <name> text equals `group` is located and the text of every
    <packagereq> beneath it is collected, without duplicates and
    preserving document order. Returns an empty list when the group is
    not found.

    Raises SyntaxError when the comps file cannot be parsed.
    """
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    # Find the requested group by id or display name.
    # .iter() replaces .getiterator(), which was deprecated and is
    # removed in Python 3.9+.
    target = None
    for grp in root.iter("group"):
        gid = grp.find("id")
        gname = grp.find("name")
        if gid.text == group or gname.text == group:
            target = grp
            break

    if target is None:
        return pkglist

    # iter("packagereq") already filters by tag, so no endswith() check
    # is needed; the original's dangling duplicate-append under the
    # wrong guard (and potential unbound `pkgname`) is gone.
    for req in target.iter("packagereq"):
        pkgname = req.text
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist
931
def is_statically_linked(binary):
    """Return True when file(1) reports `binary` as statically linked.

    Runs "file <binary>" and checks its stdout for the
    ", statically linked, " marker; file(1)'s stderr is discarded.
    """
    dev_null = os.open("/dev/null", os.O_WRONLY)
    try:
        filecmd = find_binary_path("file")
        # renamed from `file` — don't shadow the builtin
        proc = subprocess.Popen([filecmd, binary],
                                stdout=subprocess.PIPE, stderr=dev_null)
        output = proc.communicate()[0]
    finally:
        # always release the fd, even if find_binary_path/Popen raises
        os.close(dev_null)
    # substring membership instead of find(...) > 0, which would also
    # (wrongly) miss a match at offset 0
    return ", statically linked, " in output
943
def setup_qemu_emulator(rootdir, arch):
    """Install a statically-linked qemu-arm into `rootdir` and register it
    with the kernel's binfmt_misc handler so ARM executables inside the
    chroot can run transparently on the build host.

    Returns the host path of the emulator that was copied/registered.
    Raises CreatorError when no statically-linked qemu-arm is available.

    NOTE(review): the `arch` parameter is never referenced in this body —
    the ARM handler is set up unconditionally.
    """
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        subprocess.call([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        subprocess.call([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")
    # Copy the emulator to the same absolute path inside the chroot so the
    # interpreter path registered below also resolves from within it.
    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        subprocess.call(["/usr/sbin/setenforce", "0"])

    node = "/proc/sys/fs/binfmt_misc/arm"
    # Already registered and the emulator is usable (static): nothing to do.
    if is_statically_linked(qemu_emulator) and os.path.exists(node):
        return qemu_emulator

    # unregister it if it has been registered and is a dynamically-linked executable
    # (writing "-1" to the node removes the binfmt entry)
    if not is_statically_linked(qemu_emulator) and os.path.exists(node):
        qemu_unregister_string = "-1\n"
        fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
        fd.write(qemu_unregister_string)
        fd.close()

    # register qemu emulator for interpreting other arch executable file
    # (format ":name:type:offset:magic:mask:interpreter:" — the magic/mask
    # pair matches the ELF header of ARM executables)
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
        fd.write(qemu_arm_string)
        fd.close()

    return qemu_emulator
987
def create_release(config, destdir, name, outimages, release):
    """ TODO: This functionality should really be in creator.py inside the
    ImageCreator class.

    Post-process the images in `destdir` into a release:
      - flatten any virtual-machine image subdirectory into destdir,
      - store the kickstart as <name>.ks with @BUILD_ID@ replaced by
        `release`, so the .ks reproduces this build without --release=,
      - rename the *-pkgs.txt report to <name>.packages,
      - rename .iso/.usbimg images to .img,
      - write a MANIFEST of "<md5> <filename>" lines.

    `outimages` is updated in place; the function returns the updated
    list with entries that no longer exist dropped.
    """
    # For virtual machine images there is a subdir per image; that
    # nesting is unnecessary for a release, so move the files up.
    thatsubdir = None
    for i in range(len(outimages)):
        path = outimages[i]
        if not os.path.isdir(path) and os.path.dirname(path) != destdir:
            thatsubdir = os.path.dirname(path)
            newfile = os.path.join(destdir, os.path.basename(path))
            shutil.move(path, newfile)
            outimages[i] = newfile
    if thatsubdir:
        shutil.rmtree(thatsubdir, ignore_errors = True)

    # Write the release kickstart: the config with @BUILD_ID@ pinned.
    # (The former shell `cp` of the unedited config was redundant — the
    # file is fully rewritten here anyway — and unsafe for paths with
    # spaces.)
    fd = open(config, "r")
    kscont = fd.read()
    fd.close()
    kscont = kscont.replace("@BUILD_ID@", release)
    ksfile = "%s/%s.ks" % (destdir, name)
    fd = open(ksfile, "w")
    fd.write(kscont)
    fd.close()
    outimages.append(ksfile)

    # Rename the package report. glob + shutil.move replaces the former
    # os.system("mv destdir/*-pkgs.txt ...") and is safe for paths
    # containing spaces or shell metacharacters.
    for pkgstxt in glob.glob("%s/*-pkgs.txt" % destdir):
        shutil.move(pkgstxt, "%s/%s.packages" % (destdir, name))
    outimages.append("%s/%s.packages" % (destdir, name))

    # Normalize image extensions to .img.
    for f in os.listdir(destdir):
        if f.endswith(".iso"):
            ff = f.replace(".iso", ".img")
        elif f.endswith(".usbimg"):
            ff = f.replace(".usbimg", ".img")
        else:
            continue
        os.rename("%s/%s" % (destdir, f), "%s/%s" % (destdir, ff))
        outimages.append("%s/%s" % (destdir, ff))

    # Write MANIFEST: one "<md5> <filename>" line per file. hashlib.md5
    # replaces the external /usr/bin/md5sum call, so checksums are
    # produced even on hosts without that binary.
    fd = open(destdir + "/MANIFEST", "w")
    for f in os.listdir(destdir):
        if f == "MANIFEST":
            continue
        try:
            imgfd = open("%s/%s" % (destdir, f), "rb")
        except IOError:
            # unreadable entry (e.g. a directory) — same warning as before
            logging.warning("Can't generate md5sum for image %s/%s" % (destdir, f))
            continue
        checksum = hashlib.md5()
        chunk = imgfd.read(65536)
        while chunk:
            checksum.update(chunk)
            chunk = imgfd.read(65536)
        imgfd.close()
        fd.write(checksum.hexdigest() + " " + f + "\n")
    outimages.append("%s/MANIFEST" % destdir)
    fd.close()

    # Drop entries that no longer exist (renamed/replaced files).
    return [path for path in outimages if os.path.exists(path)]
1058
1059 def get_local_distro():
1060     print "Local linux distribution:"
1061     for file in glob.glob("/etc/*-release"):
1062         fd = open(file, "r")
1063         content = fd.read()
1064         fd.close()
1065         print content
1066     if os.path.exists("/etc/issue"):
1067         fd = open("/etc/issue", "r")
1068         content = fd.read()
1069         fd.close()
1070         print content
1071     print "Local Kernel version: " + os.uname()[2]
1072
def check_mic_installation(argv):
    """Abort when two parallel installations of the running tool exist.

    Takes the process argv, derives the executable's basename, and
    raises CreatorError when it is present under both /usr/local/bin
    and /usr/bin (duplicate mic2 installs behave unpredictably on
    debian-based distros).
    """
    creator_name = os.path.basename(argv[0])
    locations = ("/usr/local/bin/", "/usr/bin/")
    if all(os.path.exists(prefix + creator_name) for prefix in locations):
        raise CreatorError("There are two mic2 installations existing, this will result in some unpredictable errors, the reason is installation path of mic2 binary is different from  installation path of mic2 source on debian-based distros, please remove one of them to ensure it can work normally.")
1078
1079 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
1080
1081     def get_source_repometadata(repometadata):
1082         src_repometadata=[]
1083         for repo in repometadata:
1084             if repo["name"].endswith("-source"):
1085                 src_repometadata.append(repo)
1086         if src_repometadata:
1087             return src_repometadata
1088         return None
1089
1090     def get_src_name(srpm):
1091         m = re.match("(.*)-(\d+.*)-(\d+\.\d+).src.rpm", srpm)
1092         if m:
1093             return m.group(1)
1094         return None    
1095
1096     src_repometadata = get_source_repometadata(repometadata)
1097
1098     if not src_repometadata:
1099         print "No source repo found"
1100         return None
1101
1102     src_pkgs = []
1103     lpkgs_dict = {}
1104     lpkgs_path = []
1105     for repo in src_repometadata:
1106         cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
1107         lpkgs_path += glob.glob(cachepath)
1108     
1109     for lpkg in lpkgs_path:
1110         lpkg_name = get_src_name(os.path.basename(lpkg))
1111         lpkgs_dict[lpkg_name] = lpkg
1112     localpkgs = lpkgs_dict.keys()
1113     
1114     cached_count = 0
1115     destdir = instroot+'/usr/src/SRPMS'
1116     if not os.path.exists(destdir):
1117         os.makedirs(destdir)
1118     
1119     srcpkgset = set()
1120     for _pkg in pkgs:
1121         srcpkg_name = get_source_name(_pkg, repometadata)
1122         if not srcpkg_name:
1123             return None
1124         srcpkgset.add(srcpkg_name)
1125     
1126     for pkg in list(srcpkgset):
1127         if pkg in localpkgs:
1128             cached_count += 1
1129             shutil.copy(lpkgs_dict[pkg], destdir)
1130             src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
1131         else:
1132             src_pkg = get_package(pkg, src_repometadata, 'src')
1133             if src_pkg:
1134                 shutil.copy(src_pkg, destdir)            
1135                 src_pkgs.append(src_pkg)
1136     print '--------------------------------------------------'
1137     print "%d source packages gotten from cache" %cached_count
1138
1139     return src_pkgs
1140
def add_optparser(arg):
    """Return a decorator that tags a function with an option parser.

    The decorated function gets `arg` stored as its `optparser`
    attribute; an already-present attribute is left untouched.
    """
    def tag(func):
        if not hasattr(func, "optparser"):
            func.optparser = arg
        return func
    return tag