Add --user/--passwd to repo command in KS.
[tools/mic.git] / mic / utils / misc.py
1 #!/usr/bin/python -tt
2 #
3 # Copyright (c) 2010, 2011 Intel Inc.
4 #
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the Free
7 # Software Foundation; version 2 of the License
8 #
9 # This program is distributed in the hope that it will be useful, but
10 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11 # or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 # for more details.
13 #
14 # You should have received a copy of the GNU General Public License along
15 # with this program; if not, write to the Free Software Foundation, Inc., 59
16 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
17
18 from __future__ import with_statement
19 import os
20 import sys
21 import time
22 import tempfile
23 import re
24 import shutil
25 import glob
26 import hashlib
27 import subprocess
28 import platform
29 import traceback
30
31
32 try:
33     import sqlite3 as sqlite
34 except ImportError:
35     import sqlite
36
37 try:
38     from xml.etree import cElementTree
39 except ImportError:
40     import cElementTree
41 xmlparse = cElementTree.parse
42
43 from mic import msger
44 from mic.utils.errors import CreatorError, SquashfsError
45 from mic.utils.fs_related import find_binary_path, makedirs
46 from mic.utils.grabber import myurlgrab
47 from mic.utils.proxy import get_proxy_for
48 from mic.utils import runner
49 from mic.utils import rpmmisc
50 from mic.utils.safeurl import SafeURL
51
52
# Patterns for parsing "name.arch version-release" package strings and
# source-rpm file names.  Raw strings avoid the invalid-escape-sequence
# deprecation for "\." and "\d" in ordinary string literals.
RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")
56
57
def build_name(kscfg, release=None, prefix = None, suffix = None):
    """Construct and return an image name string.

    The name is built from the kickstart file's basename (extension
    stripped), optionally wrapped with a prefix and a suffix.

    kscfg -- path to a kickstart file
    release -- when given, forces an empty suffix (used for release builds)
    prefix -- prepended as "<prefix>-"; an already-present prefix on the
              kickstart name is stripped first so it is not doubled
    suffix -- appended as "-<suffix>"; defaults to a YYYYMMDDHHMM timestamp
    """
    base = os.path.basename(kscfg)
    dot = base.rfind('.')
    if dot >= 0:
        base = base[:dot]

    # A release build always drops the timestamp/suffix part.
    if release is not None:
        suffix = ""
    if prefix is None:
        prefix = ""
    if suffix is None:
        suffix = time.strftime("%Y%m%d%H%M")

    # Avoid "pre-pre-name" when the ks name already carries the prefix.
    if base.startswith(prefix):
        base = base[len(prefix):]

    parts = []
    if prefix:
        parts.append("%s-" % prefix)
    parts.append(base)
    if suffix:
        parts.append("-%s" % suffix)
    return "".join(parts)
95
def get_distro():
    """Detect linux distribution, support "meego"

    Returns the (dist, ver, id) tuple as reported by the platform module;
    elements may be empty strings when detection fails.
    """

    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    try:
        # NOTE(review): platform.linux_distribution() was deprecated in
        # Python 3.5 and removed in 3.8 -- confirm the target interpreter.
        (dist, ver, id) = platform.linux_distribution( \
                              supported_dists = support_dists)
    except:
        # Fall back to the older platform.dist() API when
        # linux_distribution() is unavailable or raises.
        (dist, ver, id) = platform.dist( \
                              supported_dists = support_dists)

    return (dist, ver, id)
116
def get_distro_str():
    """Return a single human-readable string identifying the current
    linux distribution, or 'Unknown Linux Distro' when detection fails.
    """
    name, version, nick = get_distro()

    if not name:
        return 'Unknown Linux Distro'
    joined = ' '.join(part.strip() for part in (name, version, nick))
    return joined.strip()
127
# Path of the temporary udev rule installed by hide_loopdev_presentation(),
# or None when no rule has been written yet.
_LOOP_RULE_PTH = None

def hide_loopdev_presentation():
    """Install a udev rule hiding loop devices from the desktop UI
    (UDISKS_PRESENTATION_HIDE) and re-trigger udev.

    Best-effort: when no rules directory exists or the write/trigger
    fails, the function returns silently.
    """
    udev_rules = "80-prevent-loop-present.rules"
    udev_rules_dir = [
                       '/usr/lib/udev/rules.d/',
                       '/lib/udev/rules.d/',
                       '/etc/udev/rules.d/'
                     ]

    global _LOOP_RULE_PTH

    # The last existing directory wins, so /etc overrides the lib dirs.
    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except Exception:
        # Best-effort only.  (Was a bare "except:", which also swallowed
        # SystemExit/KeyboardInterrupt.)
        pass
def unhide_loopdev_presentation():
    """Remove the udev rule installed by hide_loopdev_presentation()
    and re-trigger udev.

    Best-effort: does nothing when no rule was installed, and ignores
    removal/trigger failures.
    """
    global _LOOP_RULE_PTH

    if not _LOOP_RULE_PTH:
        return

    try:
        os.unlink(_LOOP_RULE_PTH)
        runner.quiet('udevadm trigger')
    except Exception:
        # Best-effort cleanup.  (Was a bare "except:", which also
        # swallowed SystemExit/KeyboardInterrupt.)
        pass
def extract_rpm(rpmfile, targetdir):
    """Extract the payload of `rpmfile` into `targetdir` by piping
    rpm2cpio into cpio.

    rpmfile   -- path of the rpm package to unpack
    targetdir -- existing directory to extract into
    """
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)
    try:
        msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
        p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
        p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # Close our copy so cpio sees EOF/SIGPIPE if rpm2cpio exits early.
        p1.stdout.close()
        (sout, serr) = p2.communicate()
        msger.verbose(sout or serr)
    finally:
        # Restore the previous working directory even when spawning one
        # of the subprocesses raises (the original leaked the chdir).
        os.chdir(olddir)
def compressing(fpath, method):
    """Compress the file at `fpath` in place with the given `method`.

    method -- "gz" or "bz2"; the first available tool for that method is
              used (parallel implementations preferred).
    Raises CreatorError for an unknown method, when no tool is installed,
    or when the compression run fails.
    """
    comp_map = {
        "gz": ["pgzip", "pigz", "gzip"],
        "bz2": ["pbzip2", "bzip2"],
    }
    if method not in comp_map:
        raise CreatorError("Unsupport compress format: %s, valid values: %s"
                           % (method, ','.join(comp_map.keys())))
    cmd = None
    for cmdname in comp_map[method]:
        try:
            cmd = find_binary_path(cmdname)
            break
        except CreatorError:
            # Tool not installed; try the next candidate.
            pass
    if not cmd:
        raise CreatorError("Command %s not available" % cmdname)
    rc = runner.show([cmd, "-f", fpath])
    if rc:
        # Was "% (comp_map[method], fpath)", which interpolated the whole
        # candidate list into the message instead of the method name.
        raise CreatorError("Failed to %s file: %s" % (method, fpath))
def taring(dstfile, target):
    """Pack `target` (a file or a directory's contents) into the tar
    archive `dstfile`.

    The destination extension selects post-compression: ".tar" gives a
    plain archive; ".gz"/".tgz" and ".bz2"/".tbz" run the corresponding
    compressor on the plain tarball afterwards.
    """
    import tarfile
    stem, ext = os.path.splitext(dstfile)
    # Map the extension to a compression method (None = uncompressed).
    comp = {".tar": None,
            ".gz": "gz",       # .tar.gz
            ".bz2": "bz2",     # .tar.bz2
            ".tgz": "gz",
            ".tbz": "bz2"}[ext]

    # Decide where the intermediate plain tarball lives.
    if comp is None:
        tarpath = dstfile
    elif stem.endswith(".tar"):
        tarpath = stem
    else:
        tarpath = stem + ".tar"

    archive = tarfile.open(tarpath, 'w')
    if not os.path.isdir(target):
        archive.add(target, os.path.basename(target))
    else:
        # Add directory *contents* at the archive root, not the dir itself.
        for entry in os.listdir(target):
            archive.add(os.path.join(target, entry), entry)
    archive.close()

    if comp:
        compressing(tarpath, comp)
        # ".tgz"/".tbz" targets: the compressor produced "<stem>.tar.<comp>",
        # rename it to the requested destination.
        if not stem.endswith(".tar"):
            shutil.move("%s.%s" % (tarpath, comp), dstfile)
235
def ziping(dstfile, target):
    """Pack `target` into a deflate-compressed zip archive at `dstfile`.

    For a directory target only the regular files at its top level are
    added (subdirectories are skipped); a file target is added under its
    basename.
    """
    import zipfile
    archive = zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED)
    if not os.path.isdir(target):
        archive.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
    else:
        for entry in os.listdir(target):
            full = os.path.join(target, entry)
            if os.path.isfile(full):
                archive.write(full, entry, zipfile.ZIP_DEFLATED)
    archive.close()
248
# Archive-extension -> packer dispatch table, consumed by packing() below.
pack_formats = {
    ".tar": taring,
    ".tar.gz": taring,
    ".tar.bz2": taring,
    ".tgz": taring,
    ".tbz": taring,
    ".zip": ziping,
}
257
def packing(dstfile, target):
    """Pack `target` into `dstfile`, dispatching on the destination
    extension via the pack_formats table.

    Raises CreatorError for an unrecognized extension.
    """
    stem, ext = os.path.splitext(dstfile)
    # Fold ".tar.gz"/".tar.bz2" into a single compound extension key.
    if stem.endswith(".tar") and ext in (".gz", ".bz2"):
        ext = ".tar" + ext
    try:
        handler = pack_formats[ext]
    except KeyError:
        raise CreatorError("Unsupport pack format: %s, valid values: %s"
                           % (ext, ','.join(pack_formats.keys())))
    handler(dstfile, target)
268
def human_size(size):
    """Return a human readable string for a size in bytes.

    size -- number of bytes; non-positive values yield "0M"
    Returns e.g. "1.0K", "2.5G".
    """

    if size <= 0:
        return "0M"
    import math
    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    expo = int(math.log(size, 1024))
    # Clamp so absurdly large sizes cannot index past the last unit
    # (the original raised IndexError beyond 'Y').
    expo = min(expo, len(measure) - 1)
    mant = float(size) / math.pow(1024, expo)
    return "{0:.1f}{1:s}".format(mant, measure[expo])
280
def get_block_size(file_obj):
    """ Returns block size for file object 'file_obj'. Errors are indicated by
    the 'IOError' exception. """

    from fcntl import ioctl
    import struct

    # Get the block size of the host file-system for the image file by calling
    # the FIGETBSZ ioctl (number 2).
    binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
    # The kernel fills the buffer with a native unsigned int block size.
    return struct.unpack('I', binary_data)[0]
292
def check_space_pre_cp(src, dst):
    """Raise CreatorError when the filesystem holding `dst` lacks room
    for `src`.  Intended to be called before 'cp'-like operations.
    """

    needed = get_file_size(src) * 1024 * 1024   # du reports MB; convert to bytes
    available = get_filesystem_avail(dst)
    if needed > available:
        raise CreatorError("space on %s(%s) is not enough for about %s files"
                           % (dst, human_size(available), human_size(needed)))
303
def calc_hashes(file_path, hash_names, start = 0, end = None):
    """ Calculate hashes over a byte range of a file.

    file_path  -- the file to hash
    hash_names -- iterable of hashlib algorithm names (e.g. ('md5','sha1'))
    start, end -- byte offsets delimiting the region; `end` defaults to
                  the file size
    Returns the list of hex digests in the same order as 'hash_names'.
    """
    if end is None:
        end = os.path.getsize(file_path)

    to_read = end - start
    read = 0

    hashes = [hashlib.new(name) for name in hash_names]

    with open(file_path, "rb") as f:
        f.seek(start)

        while read < to_read:
            # Never read past `end`.
            chunk = f.read(min(65536, to_read - read))
            if not chunk:
                # EOF before `end`; hash whatever could be read instead of
                # looping on empty reads.
                break
            for hash_obj in hashes:
                hash_obj.update(chunk)
            read += len(chunk)

    return [hash_obj.hexdigest() for hash_obj in hashes]
339
def get_md5sum(fpath):
    """Return the hex md5 digest of the whole file at `fpath`."""
    return calc_hashes(fpath, ('md5', ))[0]
342
343
def normalize_ksfile(ksconf, release, arch):
    '''
    Return the name of a normalized ks file in which macro variables
    @BUILD_ID@ and @ARCH@ are replace with real values.

    The original ks file is returned if no special macro is used, otherwise
    a temp file is created and returned, which will be deleted when program
    exits normally.

    ksconf  -- path of the kickstart file to normalize
    release -- value substituted for @BUILD_ID@; falls back to "latest"
    arch    -- value substituted for @ARCH@; any i?86 arch becomes "ia32"
    '''

    if not release:
        release = "latest"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    # Fast path: nothing to substitute, hand back the original file.
    if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
        return ksconf

    msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
    ksc = ksc.replace("@ARCH@", arch)
    ksc = ksc.replace("@BUILD_ID@", release)

    # Write the substituted content to a temp file named after the original.
    fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
    os.write(fd, ksc)
    os.close(fd)

    msger.debug('normalized ks file:%s' % ksconf)

    # Best-effort removal of the temp file at normal interpreter exit.
    # (Python 2-only "except E, err" syntax.)
    def remove_temp_ks():
        try:
            os.unlink(ksconf)
        except OSError, err:
            msger.warning('Failed to remove temp ks file:%s:%s' % (ksconf, err))

    import atexit
    atexit.register(remove_temp_ks)

    return ksconf
385
386
def _check_mic_chroot(rootdir):
    """Warn (via msger) when `rootdir` does not look like a MeeGo/Tizen
    chroot: no known *-release file, or no kernel under /boot.
    """
    def _path(path):
        return rootdir.rstrip('/') + path

    release_files = [_path(name) for name in ("/etc/moblin-release",
                                              "/etc/meego-release",
                                              "/etc/tizen-release")]

    if not any(os.path.exists(f) for f in release_files):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find kernel module under %s" % rootdir)

    return
402
def selinux_check(arch, fstypes):
    """Refuse to build arm or btrfs images while SELinux is enforcing.

    Silently returns when getenforce is not installed (SELinux assumed
    absent); otherwise raises CreatorError for the unsupported cases.
    """
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        return

    enforcing = runner.outs([getenforce]) == "Enforcing"
    if arch and arch.startswith("arm") and enforcing:
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    if 'btrfs' in fstypes and enforcing:
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")
418
def get_image_type(path):
    """Classify the image at `path`.

    Returns one of "fs" (chroot directory), "loop", "raw", "vmdk", "vdi",
    "livecd", "liveusb", "ext3fsimg", "ext4fsimg" or "btrfsimg".
    Detection order: file extension, VirtualBox header magic, then the
    output of the 'file' utility.  Raises CreatorError when nothing
    matches.
    """
    def _get_extension_name(path):
        # Extension without the dot, or None when there is none.
        match = re.search("(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    # A directory is treated as a chroot-style filesystem tree.
    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    # Extension -> image type shortcuts.
    maptab = {
              "tar": "loop",
              "raw":"raw",
              "vmdk":"vmdk",
              "vdi":"vdi",
              "iso":"livecd",
              "usbimg":"liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    # No known extension: sniff the first 1 KiB for the VirtualBox magic.
    fd = open(path, "rb")
    file_header = fd.read(1024)
    fd.close()
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    # Fall back to the 'file' utility.  The patterns below are checked in
    # order; note "usbimg" (x86 boot sector + active) must be tested
    # before the broader "raw" (x86 boot sector) pattern.
    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)
475
476
def get_file_size(filename):
    """ Return size in MB unit: the larger of the apparent size ("-b")
    and the on-disk usage, both as reported by du. """
    sizes = []
    for extra_flags in (["-b"], []):
        cmd = ['du', "-s"] + extra_flags + ["-B", "1M", filename]
        rc, output = runner.runtool(cmd)
        if rc != 0:
            raise CreatorError("Failed to run: %s" % ' '.join(cmd))
        sizes.append(int(output.split()[0]))
    return max(sizes)
492
493
def get_filesystem_avail(fs):
    """Return the number of free bytes available to unprivileged users
    on the filesystem containing `fs`."""
    stats = os.statvfs(fs)
    return stats.f_bsize * stats.f_bavail
497
def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    """Convert a vmdk/vdi disk image to raw format.

    srcimg/dstimg -- source and destination image paths
    srcfmt        -- "vmdk" (uses qemu-img) or "vdi" (uses VBoxManage)
    dstfmt        -- must be "raw"
    Raises CreatorError for unsupported formats or a failed conversion.
    """
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))

    if srcfmt == "vmdk":
        tool = find_binary_path("qemu-img")
        argv = [tool, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        tool = find_binary_path("VBoxManage")
        argv = [tool, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid soure image format: %s" % srcfmt)

    if runner.show(argv) != 0:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)
    msger.debug("convert successful")
517
def uncompress_squashfs(squashfsimg, outdir):
    """Extract the squashfs image `squashfsimg` into `outdir`.

    Raises SquashfsError when unsquashfs exits non-zero.
    """
    unsquashfs = find_binary_path("unsquashfs")
    if runner.show([unsquashfs, "-d", outdir, squashfsimg]) != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)
525
def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """ FIXME: use the dir in mic.conf instead """

    # Make sure the parent directory exists before delegating to tempfile.
    makedirs(dir)
    return tempfile.mkdtemp(prefix=prefix, dir=dir)
531
def get_repostrs_from_ks(ks):
    """Extract repo definitions from a parsed kickstart object.

    Returns a list of dicts, one per "repo" command in `ks`, keyed by the
    attributes listed below; 'baseurl' is wrapped in a SafeURL carrying
    the repo's --user/--passwd credentials.  Repos without a name get a
    generated one (md5 of their baseurl).
    """
    def _get_temp_reponame(baseurl):
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        # Copy only the attributes that are present and truthy.
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',    # int
                     'priority',# int
                     'save',
                     'proxy',
                     'proxyuser',
                     'proxypasswd',  # was listed twice; duplicate removed
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'ssl_verify'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)
        # Wrap the baseurl so credentials travel with it.
        if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
            repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
                                      getattr(repodata, 'user', None),
                                      getattr(repodata, 'passwd', None))

        kickstart_repos.append(repo)

    return kickstart_repos
571
def _get_uncompressed_data_from_url(url, filename, proxies):
    """Download `url` to `filename` and decompress it in place when it is
    gzip/bzip2 compressed.  Returns the path of the uncompressed file.
    """
    filename = myurlgrab(url.full, filename, proxies)
    for suffix, tool in ((".gz", 'gunzip'), (".bz2", 'bunzip2')):
        if filename.endswith(suffix):
            runner.quiet([tool, "-f", filename])
            # Drop the compression suffix from the returned path.
            filename = filename.replace(suffix, "")
            break
    return filename
584
def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    """Fetch one repo metadata file into the local cache and return its
    (uncompressed) local path.

    When `sumtype`/`checksum` are given and an existing cached copy
    matches, the cached path is returned without re-downloading.
    """
    url = baseurl.join(filename)
    # Cache path keeps the remote basename (possibly with .gz/.bz2).
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        # The cached copy is stored uncompressed, without the suffix.
        filename = os.path.splitext(filename_tmp)[0]
    else:
        filename = filename_tmp
    if sumtype and checksum and os.path.exists(filename):
        try:
            # e.g. "sha256" -> "sha256sum" binary.
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, filename]).split()[0]

        if file_checksum and file_checksum == checksum:
            return filename

    return _get_uncompressed_data_from_url(url,filename_tmp,proxies)
605
def get_metadata_from_repos(repos, cachedir):
    """Download and cache repomd metadata for each repo dict in `repos`.

    For every repo, fetches repodata/repomd.xml, then the referenced
    primary, patterns and comps files (checksum-validated against the
    cache) plus the optional signing key.  Repos without primary
    metadata are skipped.  Returns a list of per-repo metadata dicts.
    """
    my_repo_metadata = []
    for repo in repos:
        reponame = repo['name']
        baseurl  = repo['baseurl']

        # Explicit per-repo proxy wins over the global proxy config.
        if 'proxy' in repo:
            proxy = repo['proxy']
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            # Keyed by URL scheme, e.g. {"http": "<proxy>"}.
            proxies = {str(baseurl.split(":")[0]): str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = baseurl.join("repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url.full, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        # Extract the XML namespace prefix ("{...}") from the root tag.
        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        # Locate the patterns entry (first match wins).
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # Locate the comps (package groups) entry.
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # Locate the primary package list (sqlite db or plain xml).
        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # A repo without primary metadata is unusable; skip it.
        if not primary_type:
            continue

        # Download (or re-validate) each referenced metadata file.
        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        """ Get repo key """
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            # Missing signing key is not fatal.
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name":reponame,
                                 "baseurl":baseurl,
                                 "repomd":repomd,
                                 "primary":filepaths['primary'],
                                 "cachedir":cachedir,
                                 "proxies":proxies,
                                 "patterns":filepaths['patterns'],
                                 "comps":filepaths['comps'],
                                 "repokey":repokey})

    return my_repo_metadata
699
def get_rpmver_in_repo(repometadata):
    """Return the newest version of the 'rpm' package found in the repos'
    primary metadata (xml or sqlite), or None when not found.
    """
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            # Strip the XML namespace prefix ("{...}") from the root tag.
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                # Highest version by numeric, component-wise comparison.
                # NOTE: ".next()" on the reversed iterator is Python 2 only.
                return reversed(
                         sorted(
                           versionlist,
                           key = lambda ver: map(int, ver.split('.')))).next()

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            # ORDER BY version DESC: first row is the newest version
            # (by sqlite's string ordering, not rpm version semantics).
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
                con.close()
                return row[0]

    return None
728
def get_arch(repometadata):
    """Collect the package architectures present in the repos.

    Returns (uniq_arch, archlist): `archlist` is every non-noarch,
    non-src arch seen in the primary metadata; `uniq_arch` keeps only
    the most specific arches after folding compatible ones together via
    rpmmisc.archPolicies.
    """
    archlist = []
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            # Strip the XML namespace prefix ("{...}") from the root tag.
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

            con.close()

    # Reduce archlist to mutually incompatible "most specific" arches:
    # an arch already covered by a kept arch's policy is dropped; a kept
    # arch covered by the candidate is replaced (or removed when the
    # candidate is already in uniq_arch).
    uniq_arch = []
    for i in range(len(archlist)):
        if archlist[i] not in rpmmisc.archPolicies.keys():
            continue
        need_append = True
        j = 0
        while j < len(uniq_arch):
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                need_append = False
                break
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                if need_append:
                    uniq_arch[j] = archlist[i]
                    need_append = False
                else:
                    # Candidate already placed; drop the redundant entry
                    # without advancing j (list just shifted left).
                    uniq_arch.remove(uniq_arch[j])
                    continue
            j += 1
        if need_append:
             uniq_arch.append(archlist[i])

    return uniq_arch, archlist
771
def get_package(pkg, repometadata, arch = None):
    """Find the newest build of package `pkg` across the repos, download
    it into the repo's package cache and return the local file path.

    arch -- when given, restrict the search to that arch (expanded to
            all compatible arches plus 'noarch' via rpmmisc.archPolicies)
    Returns None when the package is not found in any repo.
    NOTE(review): versions are compared as plain strings ("tmpver > ver"),
    not with rpm version semantics, so the picked "newest" can be wrong
    for multi-digit components.
    """
    ver = ""
    target_repo = None
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            # Strip the XML namespace prefix ("{...}") from the root tag.
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            # NOTE(review): SQL is built by string interpolation; `pkg`
            # comes from internal callers, but parameterized queries
            # would be safer.
            if arch:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = target_repo["baseurl"].join(pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        if os.path.exists(filename):
            # Reuse the cached rpm only when it passes integrity check.
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(url.full, filename, target_repo["proxies"])
        return pkg
    else:
        return None
840
def get_source_name(pkg, repometadata):
    """Resolve the source package name for the binary rpm string `pkg`
    ("name.arch version-release") by looking up its sourcerpm entry in
    the repos' primary metadata.  Returns None when not resolvable.
    """

    def get_bin_name(pkg):
        # Binary package name from "name.arch version-release".
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        # Source package name from "name-version-release.src.rpm".
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            # Strip the XML namespace prefix ("{...}") from the root tag.
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        # NOTE(review): plain string comparison, not rpm
                        # version semantics.
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt:
                                # The <format> children carry their own
                                # namespace; derive it from the first child.
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None
895
def get_pkglist_in_patterns(group, patterns):
    """Return the package names required by pattern *group*.

    @group: pattern name or summary text to look up
    @patterns: path to a patterns XML file
    @return: list of unique package names (order preserved); empty list
             when the group or its requires section is not found
    @raise SyntaxError: if the patterns file cannot be parsed
    """
    try:
        tree = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    # Locate the pattern element whose name or summary matches the group.
    matched = None
    for pattern in list(tree.getroot()):
        ns = pattern.tag[0:pattern.tag.rindex("}") + 1]
        name = pattern.find("%sname" % ns)
        summary = pattern.find("%ssummary" % ns)
        if group in (name.text, summary.text):
            matched = pattern
            break
    if matched is None:
        return []

    # Find the <requires> child of the matched pattern.
    requires = None
    for child in list(matched):
        if child.tag.endswith("requires"):
            requires = child
            break
    if requires is None:
        return []

    # Collect the required package names, skipping duplicates.
    pkglist = []
    for entry in list(requires):
        pkgname = entry.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)
    return pkglist
931
def get_pkglist_in_comps(group, comps):
    """Return the package names belonging to comps *group*.

    @group: group id or group name to look up
    @comps: path to a comps XML file
    @return: list of unique package names (order preserved); empty list
             when the group is not found
    @raise SyntaxError: if the comps file cannot be parsed
    """
    found = False
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    for elm in root.getiterator("group"):
        id = elm.find("id")
        name = elm.find("name")
        if id.text == group or name.text == group:
            found = True
            break

    if not found:
        return pkglist

    # getiterator("packagereq") already yields only <packagereq> elements,
    # so the original tag-endswith re-check was dead code -- and worse, it
    # left the membership test below referencing a possibly-unbound name.
    for require in elm.getiterator("packagereq"):
        pkgname = require.text
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist
958
def is_statically_linked(binary):
    """Check whether *binary* is statically linked, per file(1) output."""
    file_output = runner.outs(['file', binary])
    return file_output.find(", statically linked, ") >= 0
961
def setup_qemu_emulator(rootdir, arch):
    """Install a statically linked qemu user-mode emulator into *rootdir*
    and register it with the kernel's binfmt_misc handler so that foreign
    architecture binaries inside the chroot can be executed.

    @rootdir: path to the image root filesystem
    @arch: target architecture; "aarch64" selects qemu-arm64, anything
           else selects qemu-arm
    @return: path of the emulator binary (also copied into rootdir)
    @raise CreatorError: if no statically linked emulator is installed
    """
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    if arch == "aarch64":
        arm_binary = "qemu-arm64"
        node = "/proc/sys/fs/binfmt_misc/aarch64"
    else:
        arm_binary = "qemu-arm"
        node = "/proc/sys/fs/binfmt_misc/arm"

    # Prefer /usr/bin/qemu-arm{,64}; fall back to the -static variant when
    # the plain binary is missing or dynamically linked.
    qemu_emulator = "/usr/bin/%s" % arm_binary
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/%s-static" % arm_binary
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked %s" % arm_binary)

    # Copy the emulator to the same path inside the image root so the
    # binfmt interpreter path resolves within the chroot as well.
    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    # unregister it if it has been registered and is a dynamically-linked executable
    # NOTE(review): despite the comment above, the code unregisters any
    # existing entry unconditionally (writing "-1" removes the binfmt node),
    # then re-registers below -- confirm this is intended.
    if os.path.exists(node):
        qemu_unregister_string = "-1\n"
        with open(node, "w") as fd:
            fd.write(qemu_unregister_string)

    # register qemu emulator for interpreting other arch executable file
    # The magic/mask pair matches the ELF header of the target architecture
    # (EM_AARCH64 0xb7 / EM_ARM 0x28); the final field is the interpreter.
    if not os.path.exists(node):
        if arch == "aarch64":
            qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        else:
            qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
            fd.write(qemu_arm_string)

    return qemu_emulator
1011
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    """Fetch the source rpms of *pkgs* into <instroot>/usr/src/SRPMS.

    @pkgs: binary package strings ("name.arch version-release")
    @repometadata: repo metadata list; repos named "*-source" are used
    @instroot: image root; srpms are copied under usr/src/SRPMS there
    @cachedir: local package cache to reuse before downloading
    @return: list of fetched srpm names/paths, or None when no source
             repo is configured
    """

    def get_source_repometadata(repometadata):
        # Source repos are identified by a "-source" suffix on their name.
        src_repometadata = [repo for repo in repometadata
                            if repo["name"].endswith("-source")]
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        # "name-ver-rel.src.rpm" -> "name"
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)
    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    # Index locally cached srpms by their source package name.
    cached_paths = []
    for repo in src_repometadata:
        cached_paths.extend(
            glob.glob("%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])))

    lpkgs_dict = {}
    for path in cached_paths:
        lpkgs_dict[get_src_name(os.path.basename(path))] = path

    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    # Map every binary package to its source package name (deduplicated).
    srcpkgset = set()
    for binpkg in pkgs:
        srcpkg_name = get_source_name(binpkg, repometadata)
        if srcpkg_name:
            srcpkgset.add(srcpkg_name)

    src_pkgs = []
    cached_count = 0
    for srcpkg in list(srcpkgset):
        if srcpkg in lpkgs_dict:
            # Reuse the cached copy instead of downloading again.
            cached_count += 1
            shutil.copy(lpkgs_dict[srcpkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[srcpkg]))
        else:
            fetched = get_package(srcpkg, src_repometadata, 'src')
            if fetched:
                shutil.copy(fetched, destdir)
                src_pkgs.append(fetched)
    msger.info("%d source packages gotten from cache" % cached_count)

    return src_pkgs
1071
def strip_end(text, suffix):
    """Return *text* with a trailing *suffix* removed, if present.

    @text: the input string
    @suffix: the suffix to strip; an empty suffix leaves text unchanged
    @return: text without the suffix, or text unchanged when it does not
             end with the suffix
    """
    # Guard against an empty suffix: text.endswith("") is always True and
    # text[:-0] evaluates to text[:0] == "", which wrongly emptied the
    # whole string in the original implementation.
    if not suffix or not text.endswith(suffix):
        return text
    return text[:-len(suffix)]