#!/usr/bin/python -tt
#
# Copyright (c) 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.

from __future__ import with_statement
import os
import sys
import tempfile
import re
import shutil
import glob
import hashlib
import subprocess
import platform
import rpmmisc

try:
    from hashlib import md5
except ImportError:
    from md5 import md5

try:
    import sqlite3 as sqlite
except ImportError:
    import sqlite

try:
    from xml.etree import cElementTree
except ImportError:
    import cElementTree
xmlparse = cElementTree.parse

from errors import *
from fs_related import *
from rpmmisc import myurlgrab
from proxy import get_proxy_for
import runner

from mic import msger

RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(ver_rel)s"
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")

def get_distro():
    """Detect the Linux distribution, including "meego"-family distros.
    """

    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    try:
        (dist, ver, id) = platform.linux_distribution( \
                              supported_dists = support_dists)
    except:
        (dist, ver, id) = platform.dist( \
                              supported_dists = support_dists)

    return (dist, ver, id)

def get_distro_str():
    """Get a composite string describing the current Linux distribution.
    """
    (dist, ver, id) = get_distro()

    if not dist:
        return 'Unknown Linux Distro'
    else:
        return ' '.join(map(str.strip, (dist, ver, id)))

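# Example (illustrative): on a Fedora 16 host get_distro_str() would return
# something like "fedora 16 Verne", while an unrecognized system yields
# "Unknown Linux Distro".
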
_LOOP_RULE_PTH = "/etc/udev/rules.d/80-prevent-loop-present.rules"
def hide_loopdev_presentation():
    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except:
        pass

def unhide_loopdev_presentation():
    try:
        os.unlink(_LOOP_RULE_PTH)
        runner.quiet('udevadm trigger')
    except:
        pass

def extract_rpm(rpmfile, targetdir):
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)

    msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
    p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
    p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (sout, serr) = p2.communicate()
    msger.verbose(sout or serr)

    os.chdir(olddir)

def compressing(fpath, method):
    comp_map = {
        "gz": "gzip",
        "bz2": "bzip2"
    }
    if method not in comp_map:
        raise CreatorError("Unsupported compression format: %s, valid values: %s"
                           % (method, ','.join(comp_map.keys())))
    cmd = find_binary_path(comp_map[method])
    rc = runner.show([cmd, "-f", fpath])
    if rc:
        raise CreatorError("Failed to %s file: %s" % (comp_map[method], fpath))

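# Example (illustrative): compressing("/tmp/rootfs.img", "gz") runs
# "gzip -f /tmp/rootfs.img" and leaves /tmp/rootfs.img.gz behind; an unknown
# method such as "xz" raises CreatorError.
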
def taring(dstfile, target):
    import tarfile
    basen, ext = os.path.splitext(dstfile)
    comp = {".tar": None,
            ".gz": "gz", # for .tar.gz
            ".bz2": "bz2", # for .tar.bz2
            ".tgz": "gz",
            ".tbz": "bz2"}[ext]

    # specify tarball file path
    if not comp:
        tarpath = dstfile
    elif basen.endswith(".tar"):
        tarpath = basen
    else:
        tarpath = basen + ".tar"
    wf = tarfile.open(tarpath, 'w')

    if os.path.isdir(target):
        for item in os.listdir(target):
            wf.add(os.path.join(target, item), item)
    else:
        wf.add(target, os.path.basename(target))
    wf.close()

    if comp:
        compressing(tarpath, comp)
        # when the dstfile extension is ".tgz" or ".tbz", rename the compressed result
        if not basen.endswith(".tar"):
            shutil.move("%s.%s" % (tarpath, comp), dstfile)

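# Example (illustrative): taring("/tmp/out.tgz", "/srv/data") first writes the
# plain archive /tmp/out.tar, gzips it to /tmp/out.tar.gz via compressing(),
# and then renames the result to the requested /tmp/out.tgz.
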
def ziping(dstfile, target):
    import zipfile
    wf = zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED)
    if os.path.isdir(target):
        for item in os.listdir(target):
            fpath = os.path.join(target, item)
            if not os.path.isfile(fpath):
                continue
            wf.write(fpath, item, zipfile.ZIP_DEFLATED)
    else:
        wf.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
    wf.close()

pack_formats = {
    ".tar": taring,
    ".tar.gz": taring,
    ".tar.bz2": taring,
    ".tgz": taring,
    ".tbz": taring,
    ".zip": ziping,
}

def packing(dstfile, target):
    (base, ext) = os.path.splitext(dstfile)
    if ext in (".gz", ".bz2") and base.endswith(".tar"):
        ext = ".tar" + ext
    if ext not in pack_formats:
        raise CreatorError("Unsupported pack format: %s, valid values: %s"
                           % (ext, ','.join(pack_formats.keys())))
    func = pack_formats[ext]
    # func should be callable
    func(dstfile, target)

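# Example (illustrative): packing("/tmp/image.tar.bz2", "/srv/rootfs") maps the
# ".tar.bz2" suffix to taring(), packing("/tmp/image.zip", "/srv/rootfs")
# dispatches to ziping(), and any other extension raises CreatorError.
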
def human_size(size):
    """Return a human-readable string for a size given in bytes.
    """

    if size <= 0:
        return "0M"
    import math
    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    expo = int(math.log(size, 1024))
    mant = float(size/math.pow(1024, expo))
    return "{0:.1f}{1:s}".format(mant, measure[expo])

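# Example (illustrative): human_size(0) returns "0M", human_size(1536) returns
# "1.5K", and human_size(3 * 1024 ** 3) returns "3.0G".
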
def check_space_pre_cp(src, dst):
    """Check whether there is enough free disk space before 'cp'-like
    operations; raise an exception if there is not.
    """

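    # get_file_size() reports megabytes, so scale to bytes before comparing
    # against the byte count returned by get_filesystem_avail().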
    srcsize  = get_file_size(src) * 1024 * 1024
    freesize = get_filesystem_avail(dst)
    if srcsize > freesize:
        raise CreatorError("space on %s(%s) is not enough for about %s of files"
                           % (dst, human_size(freesize), human_size(srcsize)))

def get_md5sum(fpath):
    blksize = 65536 # 64 KiB blocks are a reasonable read size

    md5sum = md5()
    with open(fpath, 'rb') as f:
        while True:
            data = f.read(blksize)
            if not data:
                break
            md5sum.update(data)
    return md5sum.hexdigest()

def normalize_ksfile(ksconf, release, arch):
    def _clrtempks():
        try:
            os.unlink(ksconf)
        except:
            pass

    if not os.path.exists(ksconf):
        return

    if not release:
        release = "latest"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" in ksc or "@BUILD_ID@" in ksc:
        msger.info("Substituting macro variables @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
        ksc = ksc.replace("@ARCH@", arch)
        ksc = ksc.replace("@BUILD_ID@", release)
        fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
        os.write(fd, ksc)
        os.close(fd)

        msger.debug('new ks path %s' % ksconf)

        import atexit
        atexit.register(_clrtempks)

    return ksconf

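# Example (illustrative): with release="1.2" and arch="armv7l", a kickstart line
# such as "repo --baseurl=http://example.com/@BUILD_ID@/@ARCH@/" is rewritten to
# ".../1.2/armv7l/" in a temporary copy of the ks file; the temp path is
# returned and removed again at interpreter exit.
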
def _check_meego_chroot(rootdir):
    if not os.path.exists(rootdir + "/etc/moblin-release") and \
       not os.path.exists(rootdir + "/etc/meego-release") and \
       not os.path.exists(rootdir + "/etc/tizen-release"):
        raise CreatorError("Directory %s is not a MeeGo/Tizen chroot env"\
                           % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        raise CreatorError("Failed to find a kernel image (vmlinuz) under %s" % rootdir)

    return

def selinux_check(arch, fstypes):
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        return

    selinux_status = runner.outs([getenforce])
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
    if use_btrfs and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")

def get_image_type(path):
    def _get_extension_name(path):
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    if os.path.isdir(path):
        _check_meego_chroot(path)
        return "fs"

    maptab = {
              "tar": "loop",
              "raw": "raw",
              "vmdk": "vmdk",
              "vdi": "vdi",
              "iso": "livecd",
              "usbimg": "liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    fd = open(path, "rb")
    file_header = fd.read(1024)
    fd.close()
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)

def get_file_size(file):
    """ Return size in MB unit """
    rc, duOutput  = runner.runtool(['du', "-s", "-b", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du -s -b -B 1M %s'" % file)

    size1 = int(duOutput.split()[0])
    rc, duOutput = runner.runtool(['du', "-s", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du -s -B 1M %s'" % file)

    size2 = int(duOutput.split()[0])
    if size1 > size2:
        return size1
    else:
        return size2

def get_filesystem_avail(fs):
    vfstat = os.statvfs(fs)
    return vfstat.f_bavail * vfstat.f_bsize

def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    # convert disk format
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)

def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress the file system from a squashfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    args = [ unsquashfs, "-d", outdir, squashfsimg ]
    rc = runner.show(args)
    if (rc != 0):
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)

def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """ FIXME: use the dir in mic.conf instead """

    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)

def get_repostrs_from_ks(ks):
    def _get_temp_reponame(baseurl):
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',    # int
                     'priority',# int
                     'save',
                     'proxy',
                     'proxyuser',
                     'proxypasswd',
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'ssl_verify'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)

        kickstart_repos.append(repo)

    return kickstart_repos

def _get_uncompressed_data_from_url(url, filename, proxies):
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        filename = filename.replace(suffix, "")
    return filename

def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    url = os.path.join(baseurl, filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        filename = os.path.splitext(filename_tmp)[0]
    else:
        filename = filename_tmp
    if sumtype and checksum and os.path.exists(filename):
        try:
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, filename]).split()[0]

        if file_checksum and file_checksum == checksum:
            return filename

    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)

def get_metadata_from_repos(repos, cachedir):
    my_repo_metadata = []
    for repo in repos:
        reponame = repo['name']
        baseurl  = repo['baseurl']

        if 'proxy' in repo:
            proxy = repo['proxy']
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            proxies = {str(baseurl.split(":")[0]): str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = os.path.join(baseurl, "repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

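        # The root tag of repomd.xml is namespace-qualified, e.g.
        # "{http://linux.duke.edu/metadata/repo}repomd"; the slice below keeps
        # the "{...}" prefix so child elements can be found by qualified name.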
        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        if not primary_type:
            continue

        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                       proxies,
                                                       cachedir,
                                                       reponame,
                                                       filepaths[item],
                                                       sumtypes[item],
                                                       checksums[item])

        # Get repo key
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name":reponame,
                                 "baseurl":baseurl,
                                 "repomd":repomd,
                                 "primary":filepaths['primary'],
                                 "cachedir":cachedir,
                                 "proxies":proxies,
                                 "patterns":filepaths['patterns'],
                                 "comps":filepaths['comps'],
                                 "repokey":repokey})

    return my_repo_metadata

def get_rpmver_in_repo(repometadata):
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                return reversed(
                         sorted(
                           versionlist,
                           key = lambda ver: map(int, ver.split('.')))).next()

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
                con.close()
                return row[0]

    return None

def get_arch(repometadata):
    archlist = []
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

            con.close()

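    # Collapse archlist into the set of "most capable" architectures: an arch
    # is dropped when it already appears in the compatibility chain of a kept
    # arch (rpmmisc.archPolicies, a colon-separated list), and it replaces any
    # kept arch whose chain it covers.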
    uniq_arch = []
    for i in range(len(archlist)):
        if archlist[i] not in rpmmisc.archPolicies.keys():
            continue
        need_append = True
        j = 0
        while j < len(uniq_arch):
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                need_append = False
                break
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                if need_append:
                    uniq_arch[j] = archlist[i]
                    need_append = False
                else:
                    uniq_arch.remove(uniq_arch[j])
                    continue
            j += 1
        if need_append:
            uniq_arch.append(archlist[i])

    return uniq_arch, archlist

def get_package(pkg, repometadata, arch = None):
    ver = ""
    target_repo = None
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if not arch:
                for row in con.execute("select version, release,location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                for row in con.execute("select version, release,location_href from packages where name = \"%s\"" % pkg):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
        url = os.path.join(target_repo["baseurl"], pkgpath)
        filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        pkg = myurlgrab(str(url), filename, target_repo["proxies"])
        return pkg
    else:
        return None

def get_source_name(pkg, repometadata):

    def get_bin_name(pkg):
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt is not None:
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None

def get_pkglist_in_patterns(group, patterns):
    found = False
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    for elm in list(root.getroot()):
        ns = elm.tag
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        if name.text == group or summary.text == group:
            found = True
            break

    if not found:
        return pkglist

    found = False
    for requires in list(elm):
        if requires.tag.endswith("requires"):
            found = True
            break

    if not found:
        return pkglist

    for pkg in list(requires):
        pkgname = pkg.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist

def get_pkglist_in_comps(group, comps):
    found = False
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    for elm in root.getiterator("group"):
        id = elm.find("id")
        name = elm.find("name")
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
            found = True
            break

    if not found:
        return pkglist

    for require in elm.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
            if pkgname not in pkglist:
                pkglist.append(pkgname)

    return pkglist

def is_statically_linked(binary):
    return ", statically linked, " in runner.outs(['file', binary])

def setup_qemu_emulator(rootdir, arch):
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")

    # qemu emulator version check
    armv7_list = [a for a in rpmmisc.archPolicies.keys() if a.startswith('armv7')]
    if arch in armv7_list:  # need qemu (>=0.13.0)
        qemuout = runner.outs([qemu_emulator, "-h"])
        m = re.search(r"version\s*([.\d]+)", qemuout)
        if m:
            qemu_version = m.group(1)
            if map(int, qemu_version.split('.')) < [0, 13]:
                raise CreatorError("Requires %s version >=0.13 for %s" % (qemu_emulator, arch))
        else:
            msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, as selinux would block the qemu emulator from running
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Trying to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    node = "/proc/sys/fs/binfmt_misc/arm"
    if is_statically_linked(qemu_emulator) and os.path.exists(node):
        return qemu_emulator

    # unregister it if it has been registered and is a dynamically-linked executable
    if not is_statically_linked(qemu_emulator) and os.path.exists(node):
        qemu_unregister_string = "-1\n"
        fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
        fd.write(qemu_unregister_string)
        fd.close()

    # register the qemu emulator so the kernel can run ARM executables with it
    if not os.path.exists(node):
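        # binfmt_misc registration format (illustrative breakdown):
        #   :name:type:offset:magic:mask:interpreter:flags
        # The magic/mask below match the ELF header of ARM (EM_ARM = 0x28)
        # binaries, and the interpreter is the qemu-arm binary selected above.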
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
        fd.write(qemu_arm_string)
        fd.close()

    return qemu_emulator

def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    def get_source_repometadata(repometadata):
        src_repometadata = []
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages taken from the local cache" % cached_count)

    return src_pkgs

def strip_end(text, suffix):
    if not text.endswith(suffix):
        return text
    return text[:-len(suffix)]
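
# Example (illustrative): strip_end("foo.tar.gz", ".gz") returns "foo.tar",
# while strip_end("foo.tar", ".gz") returns "foo.tar" unchanged.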