Merge "misc.py: remove few unnecessary checks" into devel
[tools/mic.git] / mic / utils / misc.py
1 #!/usr/bin/python -tt
2 #
3 # Copyright (c) 2010, 2011 Intel Inc.
4 #
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the Free
7 # Software Foundation; version 2 of the License
8 #
9 # This program is distributed in the hope that it will be useful, but
10 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11 # or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 # for more details.
13 #
14 # You should have received a copy of the GNU General Public License along
15 # with this program; if not, write to the Free Software Foundation, Inc., 59
16 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
17
18 import os
19 import sys
20 import tempfile
21 import re
22 import shutil
23 import glob
24 import hashlib
25 import subprocess
26 import platform
27 import rpmmisc
28 from hashlib import md5
29 import sqlite3 as sqlite
30
31 from xml.etree import cElementTree
32 xmlparse = cElementTree.parse
33
34 from errors import *
35 from fs_related import *
36 from rpmmisc import myurlgrab
37 from proxy import get_proxy_for
38 import runner
39
40 from mic import msger
41
# Regexes for rpm labels.  Raw strings: '\.' / '\d' in plain strings are
# invalid escapes (DeprecationWarning on modern pythons); patterns unchanged.
# "name.arch ver-rel" -> (name, arch, ver, rel)
RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(ver_rel)s"
# "name-ver-rel.src.rpm" -> (name, ver, rel)
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")
45
def get_distro():
    """Detect the host linux distribution.

    Returns a (dist, version, id) tuple of strings; all three are empty
    strings when the distribution cannot be detected (e.g. on pythons
    where both detection APIs have been removed).
    """
    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    try:
        (dist, ver, id) = platform.linux_distribution( \
                              supported_dists = support_dists)
    except AttributeError:
        try:
            # older pythons only have the deprecated platform.dist()
            (dist, ver, id) = platform.dist( \
                                  supported_dists = support_dists)
        except AttributeError:
            # both APIs removed (python >= 3.8): report unknown
            (dist, ver, id) = ('', '', '')

    return (dist, ver, id)
66
def get_distro_str():
    """Return a single human-readable string for the host distro."""
    (dist, ver, id) = get_distro()

    # unknown distro: detection returned an empty name
    if not dist:
        return 'Unknown Linux Distro'
    parts = [str.strip(p) for p in (dist, ver, id)]
    return ' '.join(parts).strip()
77
# Path of the installed udev rule; shared with unhide_loopdev_presentation().
_LOOP_RULE_PTH = None
def hide_loopdev_presentation():
    """Install a udev rule hiding loop devices from UDisks presentation.

    Best-effort: failures (no udev dirs, no permission) are ignored.
    Records the rule path in the module-level _LOOP_RULE_PTH so that
    unhide_loopdev_presentation() can remove it later.
    """
    # bug fix: without 'global' the assignment below created a local,
    # the module global stayed None and unhide never removed the rule;
    # it also raised UnboundLocalError when no rules dir existed.
    global _LOOP_RULE_PTH

    udev_rules = "80-prevent-loop-present.rules"
    udev_rules_dir = [
                       '/usr/lib/udev/rules.d/',
                       '/lib/udev/rules.d/',
                       '/etc/udev/rules.d/'
                     ]

    # last existing directory wins (original behaviour)
    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except Exception:
        # deliberate best-effort; but don't swallow KeyboardInterrupt
        pass
101
def unhide_loopdev_presentation():
    """Remove the udev rule installed by hide_loopdev_presentation().

    Best-effort: failures are ignored.
    """
    global _LOOP_RULE_PTH

    if not _LOOP_RULE_PTH:
        return

    try:
        os.unlink(_LOOP_RULE_PTH)
        runner.quiet('udevadm trigger')
        # forget the path so a second call is a no-op
        _LOOP_RULE_PTH = None
    except Exception:
        pass
111
def extract_rpm(rpmfile, targetdir):
    """Extract the payload of an rpm package into targetdir.

    Pipes `rpm2cpio rpmfile` into `cpio -idv`, running inside targetdir.
    """
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)
    try:
        msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
        p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
        p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # close our copy so rpm2cpio gets SIGPIPE if cpio exits early
        p1.stdout.close()
        (sout, serr) = p2.communicate()
        msger.verbose(sout or serr)
    finally:
        # bug fix: the working directory was not restored on error
        os.chdir(olddir)
127
def compressing(fpath, method):
    """Compress fpath in place with the given method ("gz" or "bz2").

    Raises CreatorError for an unknown method or a failing compressor.
    """
    comp_map = {
        "gz": "gzip",
        "bz2": "bzip2"
    }
    if method not in comp_map:
        # message typo fixed: "Unsupport" -> "Unsupported"
        raise CreatorError("Unsupported compress format: %s, valid values: %s"
                           % (method, ','.join(comp_map.keys())))
    cmd = find_binary_path(comp_map[method])
    rc = runner.show([cmd, "-f", fpath])
    if rc:
        raise CreatorError("Failed to %s file: %s" % (comp_map[method], fpath))
140
def taring(dstfile, target):
    """Pack target (a file or a directory's contents) into dstfile.

    The extension of dstfile selects the compression: .tar (none),
    .tar.gz/.tgz (gzip), .tar.bz2/.tbz (bzip2).
    """
    import tarfile

    basen, ext = os.path.splitext(dstfile)
    # map the final extension onto a compression method (None = plain tar)
    comp = {".tar": None,
            ".gz": "gz",   # for .tar.gz
            ".bz2": "bz2", # for .tar.bz2
            ".tgz": "gz",
            ".tbz": "bz2"}[ext]

    # the archive is always written uncompressed first
    if comp is None:
        tarpath = dstfile
    elif basen.endswith(".tar"):
        tarpath = basen
    else:
        tarpath = basen + ".tar"

    archive = tarfile.open(tarpath, 'w')
    if os.path.isdir(target):
        for entry in os.listdir(target):
            archive.add(os.path.join(target, entry), entry)
    else:
        archive.add(target, os.path.basename(target))
    archive.close()

    if comp:
        compressing(tarpath, comp)
        # ".tgz"/".tbz" outputs come back as "<base>.tar.<comp>": rename
        if not basen.endswith(".tar"):
            shutil.move("%s.%s" % (tarpath, comp), dstfile)
171
def ziping(dstfile, target):
    """Pack target into the zip archive dstfile (ZIP_DEFLATED).

    For a directory only its top-level regular files are added;
    subdirectories are skipped.
    """
    import zipfile

    archive = zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED)
    if os.path.isdir(target):
        for entry in os.listdir(target):
            fpath = os.path.join(target, entry)
            if os.path.isfile(fpath):
                archive.write(fpath, entry, zipfile.ZIP_DEFLATED)
    else:
        archive.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
    archive.close()
184
# Dispatch table used by packing(): archive extension -> packer function.
pack_formats = {
    ".tar": taring,
    ".tar.gz": taring,
    ".tar.bz2": taring,
    ".tgz": taring,
    ".tbz": taring,
    ".zip": ziping,
}
193
def packing(dstfile, target):
    """Pack target into dstfile, choosing the archiver from its extension.

    Supported extensions are the keys of pack_formats; anything else
    raises CreatorError.
    """
    (base, ext) = os.path.splitext(dstfile)
    # splitext strips only the last suffix, so rebuild ".tar.gz"/".tar.bz2"
    if ext in (".gz", ".bz2") and base.endswith(".tar"):
        ext = ".tar" + ext
    if ext not in pack_formats:
        # message typo fixed: "Unsupport" -> "Unsupported"
        raise CreatorError("Unsupported pack format: %s, valid values: %s"
                           % (ext, ','.join(pack_formats.keys())))
    func = pack_formats[ext]
    # func should be callable
    func(dstfile, target)
204
def human_size(size):
    """Return a human readable string for a size in bytes.

    Non-positive sizes yield "0M" (historical behaviour kept for callers).
    """
    if size <= 0:
        return "0M"
    import math
    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    expo = int(math.log(size, 1024))
    # bug fix: clamp so absurdly large sizes don't index past 'Y'
    expo = min(expo, len(measure) - 1)
    mant = float(size) / math.pow(1024, expo)
    return "{0:.1f}{1:s}".format(mant, measure[expo])
216
def check_space_pre_cp(src, dst):
    """Check free disk space at dst before a 'cp'-like operation of src.

    Raises CreatorError when there is not enough room.
    """
    needed = get_file_size(src) * 1024 * 1024   # get_file_size() reports MB
    avail = get_filesystem_avail(dst)
    if needed > avail:
        raise CreatorError("space on %s(%s) is not enough for about %s files"
                           % (dst, human_size(avail), human_size(needed)))
227
def get_md5sum(fpath):
    """Return the hex md5 digest of the file at fpath."""
    blksize = 65536  # read in 64K chunks to bound memory use

    digest = md5()
    with open(fpath, 'rb') as f:
        for chunk in iter(lambda: f.read(blksize), b''):
            digest.update(chunk)
    return digest.hexdigest()
239
def normalize_ksfile(ksconf, release, arch):
    """Substitute @ARCH@/@BUILD_ID@ macros in a kickstart file.

    When substitution happens, a temp copy is written, its removal is
    registered at exit, and its path is returned; otherwise ksconf is
    returned unchanged.  Returns None if ksconf does not exist.
    """
    def _clrtempks():
        # note: ksconf is rebound to the temp path before registration,
        # so this removes the temp copy, not the original file
        try:
            os.unlink(ksconf)
        except:
            pass

    if not os.path.exists(ksconf):
        return

    release = release or "latest"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" in ksc or "@BUILD_ID@" in ksc:
        msger.info("Substitute macro variable @BUILD_ID@/@ARCH in ks: %s" % ksconf)
        ksc = ksc.replace("@ARCH@", arch)
        ksc = ksc.replace("@BUILD_ID@", release)

        fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
        os.write(fd, ksc)
        os.close(fd)

        msger.debug('new ks path %s' % ksconf)

        import atexit
        atexit.register(_clrtempks)

    return ksconf
272
def _check_mic_chroot(rootdir):
    """Warn if rootdir does not look like a MeeGo/Tizen chroot."""
    def _path(path):
        return rootdir.rstrip('/') + path

    release_files = map(_path, [ "/etc/moblin-release",
                                 "/etc/meego-release",
                                 "/etc/tizen-release"])

    if not any(map(os.path.exists, release_files)):
        # bug fix: the '%s' placeholder had no argument
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find kernel module under %s" % rootdir)

    return
288
def selinux_check(arch, fstypes):
    """Refuse to build arm or btrfs images while SELinux is enforcing.

    Silently returns when getenforce is not installed.
    """
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        return

    selinux_status = runner.outs([getenforce])
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    # membership test instead of filter(): same result in python 2, and
    # also correct on python 3 where filter() returns an always-truthy
    # iterator
    if 'btrfs' in fstypes and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")
304
def get_image_type(path):
    """Guess the mic image type of path.

    A directory is validated as a chroot and reported as "fs";
    otherwise the file extension is tried first, then the output of
    file(1) is sniffed.  Raises CreatorError when nothing matches.
    """
    def _get_extension_name(path):
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    # extension -> image type
    maptab = {
              "tar": "loop",
              "raw":"raw",
              "vmdk":"vmdk",
              "vdi":"vdi",
              "iso":"livecd",
              "usbimg":"liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    # no known extension: sniff the content
    with open(path, "rb") as fd:   # 'with' so the handle is closed on errors
        file_header = fd.read(1024)
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    # ordered: the usbimg pattern must be tried before the broader raw one
    sniff_rules = [
        (r".*ISO 9660 CD-ROM filesystem.*(bootable).*", maptab["iso"]),
        (r".*x86 boot sector.*active.*",                maptab["usbimg"]),
        (r".*x86 boot sector.*",                        maptab["raw"]),
        (r".*VMware. disk image.*",                     maptab["vmdk"]),
        (r".*Linux.*ext3 filesystem data.*",            "ext3fsimg"),
        (r".*Linux.*ext4 filesystem data.*",            "ext4fsimg"),
        (r".*BTRFS.*",                                  "btrfsimg"),
    ]
    for pattern, imgtype in sniff_rules:
        if re.match(pattern, output):
            return imgtype

    raise CreatorError("Cannot detect the type of image: %s" % path)
361
def get_file_size(file):
    """Return the size of a file or directory tree in MB (via du).

    Runs du twice -- apparent size (-b) and on-disk size -- and returns
    the larger of the two.  Raises CreatorError if du fails.
    """
    rc, duOutput  = runner.runtool(['du', "-s", "-b", "-B", "1M", file])
    if rc != 0:
        # bug fix: the old message referenced an undefined name 'du'
        raise CreatorError("Failed to run 'du' on %s" % file)

    size1 = int(duOutput.split()[0])
    rc, duOutput = runner.runtool(['du', "-s", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du' on %s" % file)

    size2 = int(duOutput.split()[0])
    return max(size1, size2)
378
def get_filesystem_avail(fs):
    """Return the free space (bytes) available to unprivileged users
    on the filesystem containing fs."""
    stat = os.statvfs(fs)
    return stat.f_bavail * stat.f_bsize
382
def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    """Convert a vmdk/vdi disk image to raw format.

    Uses qemu-img for vmdk and VBoxManage for vdi; raises CreatorError
    for any other format combination or a failed conversion.
    """
    # only raw output is supported
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt,  dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        # message typo fixed: "soure" -> "source"
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)
402
def uncompress_squashfs(squashfsimg, outdir):
    """Unpack the filesystem in a squashfs image into outdir."""
    unsquashfs = find_binary_path("unsquashfs")
    if runner.show([unsquashfs, "-d", outdir, squashfsimg]) != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)
410
def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """Create a temporary directory under `dir` and return its path.

    FIXME: use the dir in mic.conf instead
    """
    makedirs(dir)  # make sure the parent exists first
    return tempfile.mkdtemp(prefix = prefix, dir = dir)
416
def get_repostrs_from_ks(ks):
    """Extract repo settings from a parsed kickstart handler.

    Returns a list of dicts, one per repo command; only attributes with
    truthy values are recorded.  A repo without a name gets one derived
    from the md5 of its baseurl.
    """
    def _get_temp_reponame(baseurl):
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',    # int
                     'priority',# int
                     'save',
                     'proxy',
                     'proxyuser',
                     'proxypasswd', # bug fix: was listed twice
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'ssl_verify'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)

        kickstart_repos.append(repo)

    return kickstart_repos
452
def _get_uncompressed_data_from_url(url, filename, proxies):
    """Download url to filename and decompress .gz/.bz2 in place.

    Returns the path of the (possibly decompressed) local file.
    """
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        # bug fix: replace() stripped the FIRST occurrence of the suffix
        # anywhere in the path; only the trailing one must go
        filename = filename[:-len(suffix)]
    return filename
465
def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    """Fetch one repodata file into the cache, reusing the cached copy
    when its checksum still matches the repomd entry.
    """
    url = os.path.join(baseurl, filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame,
                                     os.path.basename(filename)))
    base, ext = os.path.splitext(filename_tmp)
    # compressed downloads are cached decompressed, without the suffix
    localpath = base if ext in (".gz", ".bz2") else filename_tmp

    if sumtype and checksum and os.path.exists(localpath):
        try:
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, localpath]).split()[0]

        # cache hit: checksum still matches, no download needed
        if file_checksum and file_checksum == checksum:
            return localpath

    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)
486
def get_metadata_from_repos(repos, cachedir):
    """Download and cache the repodata of every repo in `repos`.

    For each repo dict (needs 'name' and 'baseurl', optional 'proxy')
    this fetches repomd.xml, then the primary/patterns/comps metadata
    it references, plus the repo signing key if present.  Returns a
    list of dicts with local paths to the cached files; repos without
    primary metadata are skipped.  Raises CreatorError on a broken
    repomd.xml.
    """
    my_repo_metadata = []
    for repo in repos:
        reponame = repo['name']
        baseurl  = repo['baseurl']


        # an explicit per-repo proxy wins over the globally configured one
        if 'proxy' in repo:
            proxy = repo['proxy']
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
           proxies = {str(baseurl.split(":")[0]):str(proxy)}

        # repomd.xml indexes every other metadata file of the repo
        makedirs(os.path.join(cachedir, reponame))
        url = os.path.join(baseurl, "repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        # keep the "{namespace}" prefix of the root tag for tag lookups
        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        # record location + open-checksum of the patterns entry (if any)
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # same for the comps (package group) entry
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # and for primary metadata (sqlite db preferred over xml)
        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # a repo without primary metadata is unusable -- skip it
        if not primary_type:
            continue

        # download each metadata file, reusing the cache when the
        # checksum still matches
        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        """ Get repo key """
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            # a missing key is non-fatal: many repos are unsigned
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name":reponame,
                                 "baseurl":baseurl,
                                 "repomd":repomd,
                                 "primary":filepaths['primary'],
                                 "cachedir":cachedir,
                                 "proxies":proxies,
                                 "patterns":filepaths['patterns'],
                                 "comps":filepaths['comps'],
                                 "repokey":repokey})

    return my_repo_metadata
581
def get_rpmver_in_repo(repometadata):
    """Return the newest version of the 'rpm' package found in the
    repos' primary metadata (xml or sqlite), or None if absent.
    """
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                # numeric component-wise comparison ("4.10.0" > "4.9.1");
                # max() replaces the py2-only reversed(sorted(...)).next()
                return max(versionlist,
                           key=lambda ver: [int(v) for v in ver.split('.')])

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            try:
                for row in con.execute("select version from packages where "
                                       "name=\"rpm\" ORDER by version DESC"):
                    return row[0]
            finally:
                # bug fix: the connection leaked when no row matched
                con.close()

    return None
610
def get_arch(repometadata):
    """Work out the target arch(es) from the repos' primary metadata.

    Returns (ret_uniq_arch, ret_arch_list): the first is the single
    unique arch of the first repo that reduced to exactly one arch
    (may stay []), the second is the deduplicated union over all repos.
    """
    def uniqarch(archlist=[]):
        # Reduce archlist to arches that are not covered by another
        # entry's compatibility list (rpmmisc.archPolicies maps an arch
        # to a colon-separated string of compatible arches).
        # NOTE(review): the mutable default is never mutated here, and
        # callers always pass a list, so it is harmless.
        uniq_arch = []
        for i in range(len(archlist)):
            if archlist[i] not in rpmmisc.archPolicies.keys():
                continue
            need_append = True
            j = 0
            while j < len(uniq_arch):
                if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                    # already covered by a kept arch: drop the candidate
                    need_append = False
                    break
                if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                    # the candidate supersedes a kept arch
                    if need_append:
                        uniq_arch[j] = archlist[i]
                        need_append = False
                    else:
                        # candidate already placed: remove the duplicate
                        # (no j += 1: the next element shifted into j)
                        uniq_arch.remove(uniq_arch[j])
                        continue
                j += 1
            if need_append:
                 uniq_arch.append(archlist[i])

        return uniq_arch
    

    ret_uniq_arch = []
    ret_arch_list = []
    for repo in repometadata:
        # collect every package arch except noarch/src
        archlist = []
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

            con.close()

        # first repo that reduces to exactly one arch decides the
        # "unique" result; the union keeps accumulating regardless
        uniq_arch = uniqarch(archlist)
        if not ret_uniq_arch and len(uniq_arch) == 1:
            ret_uniq_arch = uniq_arch 
        ret_arch_list += uniq_arch

    ret_arch_list = uniqarch(ret_arch_list)
    return ret_uniq_arch, ret_arch_list
665
def get_package(pkg, repometadata, arch = None):
    """Download the binary package `pkg` from the configured repos.

    Scans each repo's primary metadata (xml or sqlite) for the newest
    version whose arch is compatible with `arch`, then fetches it into
    the repo's package cache (reusing an intact cached copy).  Returns
    the local file path, or None when no repo provides the package.
    """
    ver = ""
    target_repo = None
    # expand `arch` to the list of compatible arches (plus noarch)
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        # NOTE(review): plain string comparison of
                        # "ver-rel", not true rpm version ordering
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if arch:
                # NOTE(review): pkg/arches are interpolated straight into
                # the SQL string -- acceptable for trusted metadata, but
                # not escaped; consider parameterized queries
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = os.path.join(target_repo["baseurl"], pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        if os.path.exists(filename):
            # cached copy: verify integrity before reusing it
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(str(url), filename, target_repo["proxies"])
        return pkg
    else:
        return None
734
def get_source_name(pkg, repometadata):
    """Return the source package name for a binary rpm label like
    "foo.i586 1.0-1", or None when it cannot be resolved."""

    def get_bin_name(pkg):
        # "name.arch ver-rel" -> name
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        # "name-ver-rel.src.rpm" -> name
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            # explicit test: relying on element truthiness
                            # ("if fmt:") is deprecated; this keeps the
                            # exact old behaviour (skip when absent/empty)
                            if fmt is not None and len(fmt):
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    # bug fix: ver was never updated here (unlike the
                    # xml branch and get_package), so every later repo
                    # silently overrode the best match
                    ver = tmpver
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None
789
def get_pkglist_in_patterns(group, patterns):
    """Return the package names required by the pattern whose <name> or
    <summary> equals `group` in a patterns XML file.

    An empty list is returned when the pattern or its requires section
    is missing.  Raises SyntaxError for unparsable XML.
    """
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    # locate the matching pattern element
    target = None
    for elm in list(root.getroot()):
        ns = elm.tag[0:elm.tag.rindex("}")+1]
        if elm.find("%sname" % ns).text == group or \
           elm.find("%ssummary" % ns).text == group:
            target = elm
            break

    if target is None:
        return pkglist

    # find its <...requires> child (namespace prefix varies)
    requires = None
    for child in list(target):
        if child.tag.endswith("requires"):
            requires = child
            break

    if requires is None:
        return pkglist

    # collect the unique package names, preserving order
    for entry in list(requires):
        pkgname = entry.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist
825
def get_pkglist_in_comps(group, comps):
    """Return the package list of the comps <group> whose <id> or
    <name> equals `group`.

    An empty list is returned when the group is missing.  Raises
    SyntaxError for unparsable XML.
    """
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    # locate the matching group element
    target = None
    for elm in root.getiterator("group"):
        if elm.find("id").text == group or elm.find("name").text == group:
            target = elm
            break

    if target is None:
        return pkglist

    # collect the unique package names, preserving order
    for require in target.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist
852
def is_statically_linked(binary):
    """True when file(1) reports `binary` as statically linked."""
    file_output = runner.outs(['file', binary])
    return ", statically linked, " in file_output
855
def setup_qemu_emulator(rootdir, arch):
    """Install a statically linked qemu-arm into 'rootdir' and register it
    with the kernel's binfmt_misc so ARM executables run transparently.

    rootdir: chroot directory that receives a copy of the emulator
    arch:    target architecture string; armv7* requires qemu >= 0.13
    Returns the host path of the qemu emulator that was set up.
    Raises CreatorError when no statically linked qemu-arm exists, or
    when the installed qemu is too old for an armv7 target.
    """
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")

    # qemu emulator version check
    # BUGFIX: use a private loop variable; a bare "arch" here leaked out of
    # the list comprehension under Python 2 scoping and clobbered the
    # "arch" parameter before the membership test below.
    armv7_list = [a for a in rpmmisc.archPolicies.keys() if a.startswith('armv7')]
    if arch in armv7_list:  # need qemu (>=0.13.0)
        qemuout = runner.outs([qemu_emulator, "-h"])
        m = re.search(r"version\s*([.\d]+)", qemuout)
        if m:
            qemu_version = m.group(1)
            # BUGFIX: compare versions numerically; the old string compare
            # ("0.9" < "0.13") ordered lexicographically and got it wrong.
            version_tuple = tuple(int(v) for v in qemu_version.split('.') if v.isdigit())
            if version_tuple < (0, 13):
                raise CreatorError("Requires %s version >=0.13 for %s" % (qemu_emulator, arch))
        else:
            msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    node = "/proc/sys/fs/binfmt_misc/arm"
    if is_statically_linked(qemu_emulator) and os.path.exists(node):
        return qemu_emulator

    # unregister it if it has been registered and is a dynamically-linked executable
    if not is_statically_linked(qemu_emulator) and os.path.exists(node):
        qemu_unregister_string = "-1\n"
        fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
        fd.write(qemu_unregister_string)
        fd.close()

    # register qemu emulator for interpreting other arch executable file
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
        fd.write(qemu_arm_string)
        fd.close()

    return qemu_emulator
913
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    """Collect the source rpms (SRPMs) for 'pkgs' into instroot/usr/src/SRPMS.

    pkgs:         binary package names whose sources are wanted
    repometadata: repo metadata dicts; only repos named "*-source" are used
    instroot:     installation root that receives /usr/src/SRPMS
    cachedir:     package cache; cached SRPMs are copied instead of fetched
    Returns the list of SRPM files placed in the destination, or None
    when no source repository is configured.
    """
    def get_source_repometadata(repometadata):
        # Keep only the repos whose name ends with "-source".
        src_repometadata=[]
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        # Strip version/release/arch from an SRPM file name via SRPM_RE;
        # None when the name does not look like a .src.rpm.
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    # Index already-cached SRPMs by their source package name so cache
    # hits can be satisfied with a local copy below.
    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot+'/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    # Map each requested binary package to its source package name
    # (get_source_name is defined elsewhere in this module); the set
    # removes duplicates when several binaries share one source.
    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    # Satisfy each source package from the cache when possible,
    # otherwise download it from a source repo via get_package.
    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages gotten from cache" % cached_count)

    return src_pkgs
973
def strip_end(text, suffix):
    """Return 'text' with a trailing 'suffix' removed, or unchanged
    when it does not end with 'suffix'.

    BUGFIX: guard against an empty suffix -- endswith("") is always
    True and text[:-0] evaluates to "", so the old code turned any
    text into the empty string when suffix was "".
    """
    if suffix and text.endswith(suffix):
        return text[:-len(suffix)]
    return text