platform/upstream/mic.git: mic/utils/misc.py (commit be14d01e9269265dc43e31f8fe820760f08c74b5)
#!/usr/bin/python -tt
#
# Copyright (c) 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.

from __future__ import with_statement
import os
import sys
import time
import tempfile
import re
import shutil
import glob
import hashlib
import subprocess
import platform
import traceback


try:
    import sqlite3 as sqlite
except ImportError:
    import sqlite

try:
    from xml.etree import cElementTree
except ImportError:
    import cElementTree
xmlparse = cElementTree.parse

from mic import msger
from mic.archive import get_archive_suffixes
from mic.utils.errors import CreatorError, SquashfsError
from mic.utils.fs_related import find_binary_path, makedirs
from mic.utils.grabber import myurlgrab
from mic.utils.proxy import get_proxy_for
from mic.utils import runner
from mic.utils import rpmmisc
from mic.utils.safeurl import SafeURL


RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")


def build_name(kscfg, release=None, prefix=None, suffix=None):
    """Construct and return an image name string.

    This is a utility function to help create sensible name and fslabel
    strings. The name is constructed using the sans-prefix-and-extension
    kickstart filename and the supplied prefix and suffix.

    kscfg -- a path to a kickstart file
    release -- image release; if given, no suffix is appended
    prefix -- a prefix to prepend to the name; defaults to None, which causes
              no prefix to be used
    suffix -- a suffix to append to the name; defaults to None, which causes
              a YYYYMMDDHHMM suffix to be used

    """
    name = os.path.basename(kscfg)
    idx = name.rfind('.')
    if idx >= 0:
        name = name[:idx]

    if release is not None:
        suffix = ""
    if prefix is None:
        prefix = ""
    if suffix is None:
        suffix = time.strftime("%Y%m%d%H%M")

    if name.startswith(prefix):
        name = name[len(prefix):]

    prefix = "%s-" % prefix if prefix else ""
    suffix = "-%s" % suffix if suffix else ""

    ret = prefix + name + suffix
    return ret
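# Example (illustrative): build_name("/path/to/handset.ks", prefix="mic")
# returns something like "mic-handset-201407211530"; passing a release value
# suppresses the timestamp suffix and returns just "mic-handset".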

def get_distro():
    """Detect the Linux distribution, including MeeGo/Moblin/Tizen.
    """

    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    try:
        (dist, ver, id) = platform.linux_distribution( \
                              supported_dists = support_dists)
    except:
        (dist, ver, id) = platform.dist( \
                              supported_dists = support_dists)

    return (dist, ver, id)

def get_hostname():
    """Get hostname
    """
    return platform.node()

def get_hostname_distro_str():
    """Get a composite "hostname distro version id" string for the current
    Linux distribution.
    """
    (dist, ver, id) = get_distro()
    hostname = get_hostname()

    if not dist:
        return "%s(Unknown Linux Distribution)" % hostname
    else:
        distro_str = ' '.join(map(str.strip, (hostname, dist, ver, id)))
        return distro_str.strip()

_LOOP_RULE_PTH = None

def hide_loopdev_presentation():
    udev_rules = "80-prevent-loop-present.rules"
    udev_rules_dir = [
                       '/usr/lib/udev/rules.d/',
                       '/lib/udev/rules.d/',
                       '/etc/udev/rules.d/'
                     ]

    global _LOOP_RULE_PTH

    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except:
        pass

def unhide_loopdev_presentation():
    global _LOOP_RULE_PTH

    if not _LOOP_RULE_PTH:
        return

    try:
        os.unlink(_LOOP_RULE_PTH)
        runner.quiet('udevadm trigger')
    except:
        pass

def extract_rpm(rpmfile, targetdir):
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)

    msger.verbose("Extracting rpm file with cpio: %s" % rpmfile)
    p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
    p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p1.stdout.close()
    (sout, serr) = p2.communicate()
    msger.verbose(sout or serr)

    os.chdir(olddir)

def human_size(size):
    """Return a human-readable string for a size given in bytes.
    """

    if size <= 0:
        return "0M"
    import math
    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    expo = int(math.log(size, 1024))
    mant = float(size/math.pow(1024, expo))
    return "{0:.1f}{1:s}".format(mant, measure[expo])
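# e.g. human_size(3 * 1024 * 1024) returns "3.0M" and human_size(1536)
# returns "1.5K".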

def get_block_size(file_obj):
    """ Returns block size for file object 'file_obj'. Errors are indicated by
    the 'IOError' exception. """

    from fcntl import ioctl
    import struct

    # Get the block size of the host file-system for the image file by calling
    # the FIGETBSZ ioctl (number 2).
    binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
    return struct.unpack('I', binary_data)[0]
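# Typical usage (illustrative):
#     with open(image_path) as img:
#         bsize = get_block_size(img)   # commonly 4096 on ext4 hosts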

def check_space_pre_cp(src, dst):
    """Check whether there is enough free disk space for a 'cp'-like
    operation; raise CreatorError if there is not.
    """

    srcsize  = get_file_size(src) * 1024 * 1024
    freesize = get_filesystem_avail(dst)
    if srcsize > freesize:
        raise CreatorError("space on %s(%s) is not enough for about %s files"
                           % (dst, human_size(freesize), human_size(srcsize)))

def calc_hashes(file_path, hash_names, start = 0, end = None):
    """ Calculate hashes for a file. The 'file_path' argument is the file
    to calculate hash functions for, 'start' and 'end' are the starting and
    ending file offsets to calculate the hash functions for. The 'hash_names'
    argument is a list of hash names to calculate. Returns the list
    of calculated hash values in hexadecimal form, in the same order
    as 'hash_names'.
    """
    if end is None:
        end = os.path.getsize(file_path)

    chunk_size = 65536
    to_read = end - start
    read = 0

    hashes = []
    for hash_name in hash_names:
        hashes.append(hashlib.new(hash_name))

    with open(file_path, "rb") as f:
        f.seek(start)

        while read < to_read:
            if read + chunk_size > to_read:
                chunk_size = to_read - read
            chunk = f.read(chunk_size)
            for hash_obj in hashes:
                hash_obj.update(chunk)
            read += chunk_size

    result = []
    for hash_obj in hashes:
        result.append(hash_obj.hexdigest())

    return result
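# Example (illustrative): calc_hashes("/path/to/image.raw", ("md5", "sha256"))
# returns a two-element list of hex digests, in the order md5 then sha256;
# get_md5sum()/get_sha1sum()/get_sha256sum() below are thin wrappers over it.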

def get_md5sum(fpath):
    return calc_hashes(fpath, ('md5', ))[0]

def get_sha1sum(fpath):
    return calc_hashes(fpath, ('sha1', ))[0]

def get_sha256sum(fpath):
    return calc_hashes(fpath, ('sha256', ))[0]

def normalize_ksfile(ksconf, release, arch):
    '''
    Return the name of a normalized ks file in which the macro variables
    @BUILD_ID@ and @ARCH@ are replaced with real values.

    The original ks file is returned if no special macro is used, otherwise
    a temp file is created and returned, which will be deleted when the
    program exits normally.
    '''

    if not release:
        release = "latest"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
        return ksconf

    msger.info("Substituting macro variables @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
    ksc = ksc.replace("@ARCH@", arch)
    ksc = ksc.replace("@BUILD_ID@", release)

    fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
    os.write(fd, ksc)
    os.close(fd)

    msger.debug('normalized ks file: %s' % ksconf)

    def remove_temp_ks():
        try:
            os.unlink(ksconf)
        except OSError, err:
            msger.warning('Failed to remove temp ks file %s: %s' % (ksconf, err))

    import atexit
    atexit.register(remove_temp_ks)

    return ksconf
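# Example (illustrative): with release="20140721" and arch="armv7l", a ks line
# such as "repo --baseurl=http://example.com/@BUILD_ID@/repos/@ARCH@/packages/"
# is rewritten to ".../20140721/repos/armv7l/packages/" in the temporary file.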


def _check_mic_chroot(rootdir):
    def _path(path):
        return rootdir.rstrip('/') + path

    release_files = map(_path, [ "/etc/moblin-release",
                                 "/etc/meego-release",
                                 "/etc/tizen-release"])

    if not any(map(os.path.exists, release_files)):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find a kernel image (vmlinuz-*) under %s" % rootdir)

    return

def selinux_check(arch, fstypes):
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        return

    selinux_status = runner.outs([getenforce])
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
    if use_btrfs and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")

def get_image_type(path):
    def _get_extension_name(path):
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    maptab = {
              "tar": "loop",
              "raw": "raw",
              "vmdk": "vmdk",
              "vdi": "vdi",
              "iso": "livecd",
              "usbimg": "liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    with open(path, "rb") as fd:
        file_header = fd.read(1024)
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)


def get_file_size(filename):
    """ Return the size of a file or directory in MB. """
    cmd = ['du', "-s", "-b", "-B", "1M", filename]
    rc, duOutput = runner.runtool(cmd)
    if rc != 0:
        raise CreatorError("Failed to run: %s" % ' '.join(cmd))
    size1 = int(duOutput.split()[0])

    cmd = ['du', "-s", "-B", "1M", filename]
    rc, duOutput = runner.runtool(cmd)
    if rc != 0:
        raise CreatorError("Failed to run: %s" % ' '.join(cmd))

    size2 = int(duOutput.split()[0])
    return max(size1, size2)
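# get_file_size runs 'du' twice, once with "-b" (apparent size) and once
# without (actual disk usage), both in 1M blocks, and returns the larger
# of the two values.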


def get_filesystem_avail(fs):
    vfstat = os.statvfs(fs)
    return vfstat.f_bavail * vfstat.f_bsize

def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    # convert disk format
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)

def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress the file system from a squashfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    args = [ unsquashfs, "-d", outdir, squashfsimg ]
    rc = runner.show(args)
    if rc != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)

def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """ FIXME: use the dir in mic.conf instead """

    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)

def get_repostrs_from_ks(ks):
    def _get_temp_reponame(baseurl):
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',    # int
                     'priority',# int
                     'save',
                     'proxy',
                     'proxyuser',
                     'proxypasswd',
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'ssl_verify'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)
        if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
            repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
                                      getattr(repodata, 'user', None),
                                      getattr(repodata, 'passwd', None))

        kickstart_repos.append(repo)

    return kickstart_repos

def _get_uncompressed_data_from_url(url, filename, proxies):
    filename = myurlgrab(url.full, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        filename = filename.replace(suffix, "")
    return filename

def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    url = baseurl.join(filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        filename = os.path.splitext(filename_tmp)[0]
    else:
        filename = filename_tmp
    if sumtype and checksum and os.path.exists(filename):
        try:
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, filename]).split()[0]

        if file_checksum and file_checksum == checksum:
            return filename

    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)

def get_metadata_from_repos(repos, cachedir):
    my_repo_metadata = []
    for repo in repos:
        reponame = repo.name
        baseurl = repo.baseurl

        if hasattr(repo, 'proxy'):
            proxy = repo.proxy
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            proxies = {str(baseurl.split(":")[0]): str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = baseurl.join("repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url.full, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]
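        # The root tag is namespace-qualified, e.g.
        # "{http://linux.duke.edu/metadata/repo}repomd"; keep the "{...}"
        # prefix so the child lookups below can be spelled "%sdata" % ns.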

        filepaths = {}
        checksums = {}
        sumtypes = {}

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        if not primary_type:
            continue

        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        # Get repo key
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name": reponame,
                                 "baseurl": baseurl,
                                 "repomd": repomd,
                                 "primary": filepaths['primary'],
                                 "cachedir": cachedir,
                                 "proxies": proxies,
                                 "patterns": filepaths['patterns'],
                                 "comps": filepaths['comps'],
                                 "repokey": repokey,
                                 "priority": repo.priority})

    return my_repo_metadata

def get_rpmver_in_repo(repometadata):
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                return reversed(
                         sorted(
                           versionlist,
                           key = lambda ver: map(int, ver.split('.')))).next()

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
                con.close()
                return row[0]

    return None

def get_arch(repometadata):
    archlist = []
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

            con.close()

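    # Collapse the collected arch list using rpmmisc.archPolicies: an arch
    # already covered by a stronger arch kept in uniq_arch is skipped, and a
    # weaker uniq_arch entry is replaced (or dropped) when a stronger arch
    # shows up later in archlist.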
    uniq_arch = []
    for i in range(len(archlist)):
        if archlist[i] not in rpmmisc.archPolicies.keys():
            continue
        need_append = True
        j = 0
        while j < len(uniq_arch):
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                need_append = False
                break
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                if need_append:
                    uniq_arch[j] = archlist[i]
                    need_append = False
                else:
                    uniq_arch.remove(uniq_arch[j])
                    continue
            j += 1
        if need_append:
            uniq_arch.append(archlist[i])

    return uniq_arch, archlist

def get_package(pkg, repometadata, arch = None):
    ver = ""
    priority = 99
    target_repo = None
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

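    # Scan every repo: a repo with a lower numeric 'priority' value wins
    # outright; otherwise the candidate whose "version-release" string
    # compares greater is kept (a plain string comparison, not rpmvercmp).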
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        if repo["priority"] is not None:
                            tmpprior = int(repo["priority"])
                            if tmpprior < priority:
                                priority = tmpprior
                                location = elm.find("%slocation" % ns)
                                pkgpath = "%s" % location.attrib['href']
                                target_repo = repo
                                break
                            elif tmpprior > priority:
                                break
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if arch:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = target_repo["baseurl"].join(pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        if os.path.exists(filename):
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(url.full, filename, target_repo["proxies"])
        return pkg
    else:
        return None

def get_source_name(pkg, repometadata):

    def get_bin_name(pkg):
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt:
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None

def get_pkglist_in_patterns(group, patterns):
    found = False
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    for elm in list(root.getroot()):
        ns = elm.tag
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        if name.text == group or summary.text == group:
            found = True
            break

    if not found:
        return pkglist

    found = False
    for requires in list(elm):
        if requires.tag.endswith("requires"):
            found = True
            break

    if not found:
        return pkglist

    for pkg in list(requires):
        pkgname = pkg.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist

def get_pkglist_in_comps(group, comps):
    found = False
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    for elm in root.getiterator("group"):
        id = elm.find("id")
        name = elm.find("name")
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
            found = True
            break

    if not found:
        return pkglist

    for require in elm.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist

def is_statically_linked(binary):
    return ", statically linked, " in runner.outs(['file', binary])

def setup_qemu_emulator(rootdir, arch):
    qemu_emulators = []
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path;
    # the qemu emulator must be a statically-linked executable file
    if arch == "aarch64":
        node = "/proc/sys/fs/binfmt_misc/aarch64"
        if os.path.exists("/usr/bin/qemu-arm64") and is_statically_linked("/usr/bin/qemu-arm64"):
            arm_binary = "qemu-arm64"
        elif os.path.exists("/usr/bin/qemu-aarch64") and is_statically_linked("/usr/bin/qemu-aarch64"):
            arm_binary = "qemu-aarch64"
        elif os.path.exists("/usr/bin/qemu-arm64-static"):
            arm_binary = "qemu-arm64-static"
        elif os.path.exists("/usr/bin/qemu-aarch64-static"):
            arm_binary = "qemu-aarch64-static"
        else:
            raise CreatorError("Please install a statically-linked qemu-aarch64")
    elif arch == "mipsel":
        node = "/proc/sys/fs/binfmt_misc/mipsel"
        arm_binary = "qemu-mipsel"
        if not os.path.exists("/usr/bin/%s" % arm_binary) or not is_statically_linked("/usr/bin/%s" % arm_binary):
            arm_binary = "qemu-mipsel-static"
        if not os.path.exists("/usr/bin/%s" % arm_binary):
            raise CreatorError("Please install a statically-linked %s" % arm_binary)
    else:
        node = "/proc/sys/fs/binfmt_misc/arm"
        arm_binary = "qemu-arm"
        if not os.path.exists("/usr/bin/qemu-arm") or not is_statically_linked("/usr/bin/qemu-arm"):
            arm_binary = "qemu-arm-static"
        if not os.path.exists("/usr/bin/%s" % arm_binary):
            raise CreatorError("Please install a statically-linked %s" % arm_binary)

    qemu_emulator = "/usr/bin/%s" % arm_binary

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)
    qemu_emulators.append(qemu_emulator)

    # disable selinux, as selinux would prevent the qemu emulator from running
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    # register the qemu emulator so the kernel can run foreign-arch executables
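    # A binfmt_misc register string has the form
    # :name:type:offset:magic:mask:interpreter:flags; the magic/mask pairs
    # below match the ELF header e_machine field of the foreign binaries
    # (0xb7 aarch64, 0x08 mips, 0x28 arm) so the kernel hands them to the
    # copied qemu user-mode emulator.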
    if not os.path.exists(node):
        if arch == "aarch64":
            qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff:%s:\n" % qemu_emulator
        elif arch == "mipsel":
            qemu_arm_string = ":mipsel:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x08\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xfe\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff\\xff:%s:\n" % qemu_emulator
        else:
            qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator

        with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
            fd.write(qemu_arm_string)
    else:
        flags = ""
        interpreter = ""
        with open(node, "r") as fd:
            for line in fd.readlines():
                if line.startswith("flags:"):
                    flags = line[len("flags:"):].strip()
                elif line.startswith("interpreter"):
                    interpreter = line[len("interpreter"):].strip()

        if flags == "P" and interpreter.endswith("-binfmt"):
            # copy binfmt wrapper when preserve-argv[0] flag is enabled
            shutil.copy(os.path.realpath(interpreter), rootdir + interpreter)
            qemu_emulators.append(interpreter)
        elif not flags and interpreter != qemu_emulator:
            # create symlink as registered qemu emulator
            os.symlink(qemu_emulator, rootdir + interpreter)
            qemu_emulators.append(interpreter)

    return qemu_emulators

def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    def get_source_repometadata(repometadata):
        src_repometadata = []
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages found in cache" % cached_count)

    return src_pkgs

def strip_end(text, suffix):
    if not text.endswith(suffix):
        return text
    return text[:-len(suffix)]

def strip_archive_suffix(filename):
    for suffix in get_archive_suffixes():
        if filename.endswith(suffix):
            return filename[:-len(suffix)]
    else:
        msger.warning("Unsupported archive file format: %s" % filename)
    return None