#!/usr/bin/python -tt
#
# Copyright (c) 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.

from __future__ import with_statement
import os
import sys
import time
import tempfile
import re
import shutil
import glob
import hashlib
import subprocess
import platform
import traceback


try:
    import sqlite3 as sqlite
except ImportError:
    import sqlite

try:
    from xml.etree import cElementTree
except ImportError:
    import cElementTree
xmlparse = cElementTree.parse

from mic import msger
from mic.utils.errors import CreatorError, SquashfsError
from mic.utils.fs_related import find_binary_path, makedirs
from mic.utils.grabber import myurlgrab
from mic.utils.proxy import get_proxy_for
from mic.utils import runner
from mic.utils import rpmmisc
from mic.utils.safeurl import SafeURL


RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")


def build_name(kscfg, release=None, prefix=None, suffix=None):
    """Construct and return an image name string.

    This is a utility function to help create sensible name and fslabel
    strings. The name is constructed from the kickstart filename, stripped
    of its prefix and extension, plus the supplied prefix and suffix.

    kscfg -- a path to a kickstart file
    release -- a replacement for suffix, used when an image release is given
    prefix -- a prefix to prepend to the name; defaults to None, which causes
              no prefix to be used
    suffix -- a suffix to append to the name; defaults to None, which causes
              a YYYYMMDDHHMM suffix to be used
    """
    name = os.path.basename(kscfg)
    idx = name.rfind('.')
    if idx >= 0:
        name = name[:idx]

    if release is not None:
        suffix = ""
    if prefix is None:
        prefix = ""
    if suffix is None:
        suffix = time.strftime("%Y%m%d%H%M")

    if name.startswith(prefix):
        name = name[len(prefix):]

    prefix = "%s-" % prefix if prefix else ""
    suffix = "-%s" % suffix if suffix else ""

    ret = prefix + name + suffix
    return ret

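# Usage sketch for build_name() (illustrative only; the timestamp below is a
# made-up example value of time.strftime("%Y%m%d%H%M")):
#
#   >>> build_name('/srv/ks/handset.ks', prefix='tizen')
#   'tizen-handset-201401151230'
#   >>> build_name('/srv/ks/handset.ks', release='1.0', prefix='tizen')
#   'tizen-handset'
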
def get_distro():
    """Detect the Linux distribution, including MeeGo-family distros.
    """

    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    try:
        (dist, ver, id) = platform.linux_distribution(
                              supported_dists=support_dists)
    except:
        (dist, ver, id) = platform.dist(
                              supported_dists=support_dists)

    return (dist, ver, id)

def get_hostname():
    """Get the hostname.
    """
    return platform.node()

def get_hostname_distro_str():
    """Get a composite string of the hostname and Linux distribution.
    """
    (dist, ver, id) = get_distro()
    hostname = get_hostname()

    if not dist:
        return "%s(Unknown Linux Distribution)" % hostname
    else:
        distro_str = ' '.join(map(str.strip, (hostname, dist, ver, id)))
        return distro_str.strip()

_LOOP_RULE_PTH = None

def hide_loopdev_presentation():
    # Install a udev rule (UDISKS_PRESENTATION_HIDE) so the desktop does not
    # present the loop devices created while building images.
    udev_rules = "80-prevent-loop-present.rules"
    udev_rules_dir = [
                       '/usr/lib/udev/rules.d/',
                       '/lib/udev/rules.d/',
                       '/etc/udev/rules.d/'
                     ]

    global _LOOP_RULE_PTH

    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except:
        pass

def unhide_loopdev_presentation():
    # Remove the udev rule installed by hide_loopdev_presentation().
    global _LOOP_RULE_PTH

    if not _LOOP_RULE_PTH:
        return

    try:
        os.unlink(_LOOP_RULE_PTH)
        runner.quiet('udevadm trigger')
    except:
        pass

def extract_rpm(rpmfile, targetdir):
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)

    msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
    p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
    p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p1.stdout.close()
    (sout, serr) = p2.communicate()
    msger.verbose(sout or serr)

    os.chdir(olddir)

def compressing(fpath, method):
    comp_map = {
        "gz": ["pgzip", "pigz", "gzip"],
        "bz2": ["pbzip2", "bzip2"],
    }
    if method not in comp_map:
        raise CreatorError("Unsupported compress format: %s, valid values: %s"
                           % (method, ','.join(comp_map.keys())))
    cmd = None
    for cmdname in comp_map[method]:
        try:
            cmd = find_binary_path(cmdname)
            break
        except CreatorError:
            pass
    if not cmd:
        raise CreatorError("Command %s not available" % cmdname)
    rc = runner.show([cmd, "-f", fpath])
    if rc:
        raise CreatorError("Failed to compress %s with %s" % (fpath, cmd))

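# Usage sketch for compressing() (illustrative; '/tmp/rootfs.img' is a
# hypothetical path): compressing('/tmp/rootfs.img', 'gz') picks the first
# available tool of pgzip/pigz/gzip and runs it with -f, leaving
# '/tmp/rootfs.img.gz' in place of the original file.
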
def taring(dstfile, target):
    import tarfile
    basen, ext = os.path.splitext(dstfile)
    comp = {".tar": None,
            ".gz": "gz", # for .tar.gz
            ".bz2": "bz2", # for .tar.bz2
            ".tgz": "gz",
            ".tbz": "bz2"}[ext]

    # specify tarball file path
    if not comp:
        tarpath = dstfile
    elif basen.endswith(".tar"):
        tarpath = basen
    else:
        tarpath = basen + ".tar"
    wf = tarfile.open(tarpath, 'w')

    if os.path.isdir(target):
        for item in os.listdir(target):
            wf.add(os.path.join(target, item), item)
    else:
        wf.add(target, os.path.basename(target))
    wf.close()

    if comp:
        compressing(tarpath, comp)
        # when dstfile ext is ".tgz" or ".tbz", rename the compressed result
        if not basen.endswith(".tar"):
            shutil.move("%s.%s" % (tarpath, comp), dstfile)

def ziping(dstfile, target):
    import zipfile
    wf = zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED)
    if os.path.isdir(target):
        for item in os.listdir(target):
            fpath = os.path.join(target, item)
            if not os.path.isfile(fpath):
                continue
            wf.write(fpath, item, zipfile.ZIP_DEFLATED)
    else:
        wf.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
    wf.close()

pack_formats = {
    ".tar": taring,
    ".tar.gz": taring,
    ".tar.bz2": taring,
    ".tgz": taring,
    ".tbz": taring,
    ".zip": ziping,
}

def packing(dstfile, target):
    (base, ext) = os.path.splitext(dstfile)
    if ext in (".gz", ".bz2") and base.endswith(".tar"):
        ext = ".tar" + ext
    if ext not in pack_formats:
        raise CreatorError("Unsupported pack format: %s, valid values: %s"
                           % (ext, ','.join(pack_formats.keys())))
    func = pack_formats[ext]
    # dispatch to the matching archiver (taring or ziping)
    func(dstfile, target)

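# Usage sketch for packing() (illustrative paths only):
#
#   packing('/tmp/out/image.tar.gz', '/tmp/rootfs')   # taring + gzip
#   packing('/tmp/out/image.zip', '/tmp/rootfs')      # ziping
#
# The extension of dstfile selects the archiver from pack_formats above.
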
def human_size(size):
    """Return a human-readable string for a size given in bytes.
    """

    if size <= 0:
        return "0M"
    import math
    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    expo = int(math.log(size, 1024))
    mant = float(size/math.pow(1024, expo))
    return "{0:.1f}{1:s}".format(mant, measure[expo])

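# Example values for human_size() (doctest-style, illustrative):
#
#   >>> human_size(512)
#   '512.0B'
#   >>> human_size(3 * 1024 * 1024)
#   '3.0M'
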
def get_block_size(file_obj):
    """ Returns block size for file object 'file_obj'. Errors are indicated by
    the 'IOError' exception. """

    from fcntl import ioctl
    import struct

    # Get the block size of the host file-system for the image file by calling
    # the FIGETBSZ ioctl (number 2).
    binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
    return struct.unpack('I', binary_data)[0]

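# Usage sketch for get_block_size() (illustrative path; the returned block
# size depends on the host file system, 4096 is a typical ext4 value):
#
#   with open('/tmp/some.img', 'rb') as img:
#       bsize = get_block_size(img)   # e.g. 4096
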
def check_space_pre_cp(src, dst):
    """Check whether there is enough disk space before a 'cp'-like
    operation; raise CreatorError if there is not.
    """

    srcsize  = get_file_size(src) * 1024 * 1024
    freesize = get_filesystem_avail(dst)
    if srcsize > freesize:
        raise CreatorError("space on %s(%s) is not enough for about %s of files"
                           % (dst, human_size(freesize), human_size(srcsize)))

def calc_hashes(file_path, hash_names, start=0, end=None):
    """ Calculate hashes for a file. The 'file_path' argument is the file
    to calculate hash functions for, 'start' and 'end' are the starting and
    ending file offsets to calculate the hash functions for. The 'hash_names'
    argument is a list of hash names to calculate. Returns the list
    of calculated hash values in hexadecimal form, in the same order
    as 'hash_names'.
    """
    if end is None:
        end = os.path.getsize(file_path)

    chunk_size = 65536
    to_read = end - start
    read = 0

    hashes = []
    for hash_name in hash_names:
        hashes.append(hashlib.new(hash_name))

    with open(file_path, "rb") as f:
        f.seek(start)

        while read < to_read:
            if read + chunk_size > to_read:
                chunk_size = to_read - read
            chunk = f.read(chunk_size)
            for hash_obj in hashes:
                hash_obj.update(chunk)
            read += chunk_size

    result = []
    for hash_obj in hashes:
        result.append(hash_obj.hexdigest())

    return result

def get_md5sum(fpath):
    return calc_hashes(fpath, ('md5', ))[0]


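# Usage sketch for calc_hashes() (illustrative path; the digests depend on
# the file contents):
#
#   md5hex, sha1hex = calc_hashes('/tmp/image.raw', ('md5', 'sha1'))
#   # hash only the first mebibyte of the file:
#   head_md5 = calc_hashes('/tmp/image.raw', ('md5',), start=0, end=1024*1024)[0]
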
def normalize_ksfile(ksconf, release, arch):
    '''
    Return the name of a normalized ks file in which the macro variables
    @BUILD_ID@ and @ARCH@ are replaced with real values.

    The original ks file is returned if no special macro is used, otherwise
    a temp file is created and returned, which will be deleted when the
    program exits normally.
    '''

    if not release:
        release = "latest"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
        return ksconf

    msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
    ksc = ksc.replace("@ARCH@", arch)
    ksc = ksc.replace("@BUILD_ID@", release)

    fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
    os.write(fd, ksc)
    os.close(fd)

    msger.debug('normalized ks file:%s' % ksconf)

    def remove_temp_ks():
        try:
            os.unlink(ksconf)
        except OSError as err:
            msger.warning('Failed to remove temp ks file:%s:%s' % (ksconf, err))

    import atexit
    atexit.register(remove_temp_ks)

    return ksconf


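# Illustrative kickstart fragment showing the macros this function expands
# (the repo URL is a made-up example):
#
#   repo --name=base --baseurl=http://example.com/snapshots/@BUILD_ID@/repos/@ARCH@/packages/
#
# With release='tizen_20140101.1' and arch='armv7l', the temporary ks file
# would contain that URL with @BUILD_ID@ and @ARCH@ substituted accordingly.
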
def _check_mic_chroot(rootdir):
    def _path(path):
        return rootdir.rstrip('/') + path

    release_files = map(_path, [ "/etc/moblin-release",
                                 "/etc/meego-release",
                                 "/etc/tizen-release"])

    if not any(map(os.path.exists, release_files)):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find kernel image under %s" % rootdir)

    return

def selinux_check(arch, fstypes):
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        return

    selinux_status = runner.outs([getenforce])
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
    if use_btrfs and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")

def get_image_type(path):
    def _get_extension_name(path):
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    maptab = {
              "tar": "loop",
              "raw": "raw",
              "vmdk": "vmdk",
              "vdi": "vdi",
              "iso": "livecd",
              "usbimg": "liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    fd = open(path, "rb")
    file_header = fd.read(1024)
    fd.close()
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)


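# Illustrative results for get_image_type() (paths are hypothetical):
#
#   get_image_type('/srv/images/rootfs')        -> 'fs'      (directory)
#   get_image_type('/srv/images/platform.iso')  -> 'livecd'  (by extension)
#   get_image_type('/srv/images/disk.vmdk')     -> 'vmdk'
#
# For files without a known extension the type is guessed from the file
# header and from the output of the 'file' command.
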
def get_file_size(filename):
    """ Return size in MB unit """
    # apparent size, rounded up to 1M blocks
    cmd = ['du', "-s", "-b", "-B", "1M", filename]
    rc, duOutput  = runner.runtool(cmd)
    if rc != 0:
        raise CreatorError("Failed to run: %s" % ' '.join(cmd))
    size1 = int(duOutput.split()[0])

    # actual disk usage in 1M blocks
    cmd = ['du', "-s", "-B", "1M", filename]
    rc, duOutput = runner.runtool(cmd)
    if rc != 0:
        raise CreatorError("Failed to run: %s" % ' '.join(cmd))

    size2 = int(duOutput.split()[0])
    return max(size1, size2)


def get_filesystem_avail(fs):
    vfstat = os.statvfs(fs)
    return vfstat.f_bavail * vfstat.f_bsize

def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    # convert disk format
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)

def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress file system from squashfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    args = [unsquashfs, "-d", outdir, squashfsimg]
    rc = runner.show(args)
    if rc != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)

def mkdtemp(dir="/var/tmp", prefix="mic-tmp-"):
    """ FIXME: use the dir in mic.conf instead """

    makedirs(dir)
    return tempfile.mkdtemp(dir=dir, prefix=prefix)

def get_repostrs_from_ks(ks):
    def _get_temp_reponame(baseurl):
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',     # int
                     'priority', # int
                     'save',
                     'proxy',
                     'proxyuser',
                     'proxypasswd',
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'ssl_verify'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)
        if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
            repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
                                      getattr(repodata, 'user', None),
                                      getattr(repodata, 'passwd', None))

        kickstart_repos.append(repo)

    return kickstart_repos

def _get_uncompressed_data_from_url(url, filename, proxies):
    filename = myurlgrab(url.full, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        filename = filename.replace(suffix, "")
    return filename

def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    url = baseurl.join(filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        filename = os.path.splitext(filename_tmp)[0]
    else:
        filename = filename_tmp
    if sumtype and checksum and os.path.exists(filename):
        try:
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, filename]).split()[0]

        if file_checksum and file_checksum == checksum:
            return filename

    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)

def get_metadata_from_repos(repos, cachedir):
    my_repo_metadata = []
    for repo in repos:
        reponame = repo.name
        baseurl = repo.baseurl

        if hasattr(repo, 'proxy'):
            proxy = repo.proxy
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            proxies = {str(baseurl.split(":")[0]): str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = baseurl.join("repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url.full, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        if not primary_type:
            continue

        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        # Get repo key
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name": reponame,
                                 "baseurl": baseurl,
                                 "repomd": repomd,
                                 "primary": filepaths['primary'],
                                 "cachedir": cachedir,
                                 "proxies": proxies,
                                 "patterns": filepaths['patterns'],
                                 "comps": filepaths['comps'],
                                 "repokey": repokey})

    return my_repo_metadata

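# For reference, each <data> element parsed above looks roughly like this in
# repomd.xml (simplified sketch; real files carry the
# http://linux.duke.edu/metadata/repo namespace and additional fields):
#
#   <data type="primary">
#     <location href="repodata/primary.xml.gz"/>
#     <open-checksum type="sha256">...</open-checksum>
#   </data>
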
def get_rpmver_in_repo(repometadata):
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                return reversed(
                         sorted(
                           versionlist,
                           key=lambda ver: map(int, ver.split('.')))).next()

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
                con.close()
                return row[0]

    return None

def get_arch(repometadata):
    archlist = []
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

            con.close()

    # reduce archlist to one "most capable" arch per compatible family,
    # according to rpmmisc.archPolicies
    uniq_arch = []
    for i in range(len(archlist)):
        if archlist[i] not in rpmmisc.archPolicies.keys():
            continue
        need_append = True
        j = 0
        while j < len(uniq_arch):
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                need_append = False
                break
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                if need_append:
                    uniq_arch[j] = archlist[i]
                    need_append = False
                else:
                    uniq_arch.remove(uniq_arch[j])
                    continue
            j += 1
        if need_append:
            uniq_arch.append(archlist[i])

    return uniq_arch, archlist

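# Illustrative example (assuming rpmmisc.archPolicies lists i586 in the
# compatibility chain of i686): with archlist = ['i586', 'i686', 'armv7l'],
# the reduction above returns uniq_arch = ['i686', 'armv7l'], i.e. one
# "most capable" arch per compatible family, together with the raw archlist.
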
def get_package(pkg, repometadata, arch=None):
    ver = ""
    target_repo = None
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if arch:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = target_repo["baseurl"].join(pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        if os.path.exists(filename):
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(url.full, filename, target_repo["proxies"])
        return pkg
    else:
        return None

def get_source_name(pkg, repometadata):

    def get_bin_name(pkg):
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt:
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    ver = tmpver
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None

def get_pkglist_in_patterns(group, patterns):
    found = False
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    for elm in list(root.getroot()):
        ns = elm.tag
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        if name.text == group or summary.text == group:
            found = True
            break

    if not found:
        return pkglist

    found = False
    for requires in list(elm):
        if requires.tag.endswith("requires"):
            found = True
            break

    if not found:
        return pkglist

    for pkg in list(requires):
        pkgname = pkg.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist

def get_pkglist_in_comps(group, comps):
    found = False
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    for elm in root.getiterator("group"):
        id = elm.find("id")
        name = elm.find("name")
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
            found = True
            break

    if not found:
        return pkglist

    for require in elm.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist

def is_statically_linked(binary):
    return ", statically linked, " in runner.outs(['file', binary])

def setup_qemu_emulator(rootdir, arch):
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path;
    # the qemu emulator should be a statically-linked executable file
    if arch == "aarch64":
        arm_binary = "qemu-arm64"
        node = "/proc/sys/fs/binfmt_misc/aarch64"
    else:
        arm_binary = "qemu-arm"
        node = "/proc/sys/fs/binfmt_misc/arm"

    qemu_emulator = "/usr/bin/%s" % arm_binary
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/%s-static" % arm_binary
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked %s" % arm_binary)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, as selinux would block the qemu emulator from running
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    # unregister any handler that is already registered (e.g. a
    # dynamically-linked one), so our emulator can be registered instead
    if os.path.exists(node):
        qemu_unregister_string = "-1\n"
        with open(node, "w") as fd:
            fd.write(qemu_unregister_string)

    # register the qemu emulator with binfmt_misc to interpret executables
    # of the target architecture
    if not os.path.exists(node):
        if arch == "aarch64":
            qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        else:
            qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
            fd.write(qemu_arm_string)

    return qemu_emulator

def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    def get_source_repometadata(repometadata):
        src_repometadata = []
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages fetched from cache" % cached_count)

    return src_pkgs

def strip_end(text, suffix):
    if not text.endswith(suffix):
        return text
    return text[:-len(suffix)]