mic/utils/misc.py
1 #!/usr/bin/python -tt
2 #
3 # Copyright (c) 2010, 2011 Intel Inc.
4 #
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the Free
7 # Software Foundation; version 2 of the License
8 #
9 # This program is distributed in the hope that it will be useful, but
10 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11 # or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 # for more details.
13 #
14 # You should have received a copy of the GNU General Public License along
15 # with this program; if not, write to the Free Software Foundation, Inc., 59
16 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
17
18 from __future__ import with_statement
19 import os
20 import sys
21 import time
22 import tempfile
23 import re
24 import shutil
25 import glob
26 import hashlib
27 import subprocess
28 import platform
29 import traceback
30
31
32 try:
33     import sqlite3 as sqlite
34 except ImportError:
35     import sqlite
36
37 try:
38     from xml.etree import cElementTree
39 except ImportError:
40     import cElementTree
41 xmlparse = cElementTree.parse
42
43 from mic import msger
44 from mic.archive import get_archive_suffixes
45 from mic.utils.errors import CreatorError, SquashfsError
46 from mic.utils.fs_related import find_binary_path, makedirs
47 from mic.utils.grabber import myurlgrab
48 from mic.utils.proxy import get_proxy_for
49 from mic.utils import runner
50 from mic.utils import rpmmisc
51 from mic.utils.safeurl import SafeURL
52
53
54 RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
55 RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
56 SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+)\.src\.rpm")
57
58
59 def build_name(kscfg, release=None, prefix=None, suffix=None):
60     """Construct and return an image name string.
61
62     This is a utility function to help create sensible name and fslabel
63     strings. The name is constructed from the kickstart filename, minus its
64     directory and extension, plus the supplied prefix and suffix.
65
66     kscfg -- a path to a kickstart file
67     release -- a release string; when given, it replaces the suffix
68     prefix -- a prefix to prepend to the name; defaults to None, which causes
69               no prefix to be used
70     suffix -- a suffix to append to the name; defaults to None, which causes
71               a YYYYMMDDHHMM timestamp suffix to be used
72
73     Note that passing a release suppresses the timestamp suffix entirely.
74
75     """
76     name = os.path.basename(kscfg)
77     idx = name.rfind('.')
78     if idx >= 0:
79         name = name[:idx]
80
81     if release is not None:
82         suffix = ""
83     if prefix is None:
84         prefix = ""
85     if suffix is None:
86         suffix = time.strftime("%Y%m%d%H%M")
87
88     if name.startswith(prefix):
89         name = name[len(prefix):]
90
91     prefix = "%s-" % prefix if prefix else ""
92     suffix = "-%s" % suffix if suffix else ""
93
94     ret = prefix + name + suffix
95     return ret
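# Illustrative usage sketch (hypothetical paths and timestamp, not from this module):
#   build_name("/path/to/handset.ks", prefix="tizen")
#     -> "tizen-handset-201403140830"   # suffix defaults to a YYYYMMDDHHMM timestamp
#   build_name("/path/to/handset.ks", release="2.0", prefix="tizen")
#     -> "tizen-handset"                # a given release suppresses the suffix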
96
97 def get_distro():
98     """Detect linux distribution, support "meego"
99     """
100
101     support_dists = ('SuSE',
102                      'debian',
103                      'fedora',
104                      'redhat',
105                      'centos',
106                      'meego',
107                      'moblin',
108                      'tizen')
109     try:
110         (dist, ver, id) = platform.linux_distribution( \
111                               supported_dists = support_dists)
112     except:
113         (dist, ver, id) = platform.dist( \
114                               supported_dists = support_dists)
115
116     return (dist, ver, id)
117
118 def get_hostname():
119     """Get hostname
120     """
121     return platform.node()
122
123 def get_hostname_distro_str():
124     """Get composited string for current linux distribution
125     """
126     (dist, ver, id) = get_distro()
127     hostname = get_hostname()
128
129     if not dist:
130         return "%s(Unknown Linux Distribution)" % hostname
131     else:
132         distro_str = ' '.join(map(str.strip, (hostname, dist, ver, id)))
133         return distro_str.strip()
134
135 _LOOP_RULE_PTH = None
136
137 def hide_loopdev_presentation():
138     udev_rules = "80-prevent-loop-present.rules"
139     udev_rules_dir = [
140                        '/usr/lib/udev/rules.d/',
141                        '/lib/udev/rules.d/',
142                        '/etc/udev/rules.d/'
143                      ]
144
145     global _LOOP_RULE_PTH
146
147     for rdir in udev_rules_dir:
148         if os.path.exists(rdir):
149             _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)
150
151     if not _LOOP_RULE_PTH:
152         return
153
154     try:
155         with open(_LOOP_RULE_PTH, 'w') as wf:
156             wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')
157
158         runner.quiet('udevadm trigger')
159     except:
160         pass
161
162 def unhide_loopdev_presentation():
163     #global _LOOP_RULE_PTH
164
165     if not _LOOP_RULE_PTH:
166         return
167
168     try:
169         os.unlink(_LOOP_RULE_PTH)
170         runner.quiet('udevadm trigger')
171     except:
172         pass
173
174 def extract_rpm(rpmfile, targetdir):
175     rpm2cpio = find_binary_path("rpm2cpio")
176     cpio = find_binary_path("cpio")
177
178     olddir = os.getcwd()
179     os.chdir(targetdir)
180
181     msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
182     p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
183     p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
184                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
185     p1.stdout.close()
186     (sout, serr) = p2.communicate()
187     msger.verbose(sout or serr)
188
189     os.chdir(olddir)
190
191 def human_size(size):
192     """Return human readable string for Bytes size
193     """
194
195     if size <= 0:
196         return "0M"
197     import math
198     measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
199     expo = int(math.log(size, 1024))
200     mant = float(size/math.pow(1024, expo))
201     return "{0:.1f}{1:s}".format(mant, measure[expo])
202
203 def get_block_size(file_obj):
204     """ Returns block size for file object 'file_obj'. Errors are indicated by
205     the 'IOError' exception. """
206
207     from fcntl import ioctl
208     import struct
209
210     # Get the block size of the host file-system for the image file by calling
211     # the FIGETBSZ ioctl (number 2).
212     binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
213     return struct.unpack('I', binary_data)[0]
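# Example call (hypothetical path; requires a filesystem that implements the
# FIGETBSZ ioctl, otherwise IOError is raised):
#   with open("/var/tmp/mic-image.raw", "rb") as img:
#       bsize = get_block_size(img)   # typically 4096 on ext4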
214
215 def check_space_pre_cp(src, dst):
216     """Check whether disk space is enough before 'cp' like
217     operations, else exception will be raised.
218     """
219
220     srcsize  = get_file_size(src) * 1024 * 1024
221     freesize = get_filesystem_avail(dst)
222     if srcsize > freesize:
223         raise CreatorError("space on %s(%s) is not enough for about %s files"
224                            % (dst, human_size(freesize), human_size(srcsize)))
225
226 def calc_hashes(file_path, hash_names, start=0, end=None):
227     """ Calculate hashes for a file. The 'file_path' argument is the file
228     to calculate hash functions for, 'start' and 'end' are the starting and
229     ending file offsets to calculate the hash functions for. The 'hash_names'
230     argument is a list of hash names to calculate. Returns the list
231     of calculated hash values in hexadecimal form, in the same order
232     as 'hash_names'.
233     """
234     if end is None:
235         end = os.path.getsize(file_path)
236
237     chunk_size = 65536
238     to_read = end - start
239     read = 0
240
241     hashes = []
242     for hash_name in hash_names:
243         hashes.append(hashlib.new(hash_name))
244
245     with open(file_path, "rb") as f:
246         f.seek(start)
247
248         while read < to_read:
249             if read + chunk_size > to_read:
250                 chunk_size = to_read - read
251             chunk = f.read(chunk_size)
252             for hash_obj in hashes:
253                 hash_obj.update(chunk)
254             read += chunk_size
255
256     result = []
257     for hash_obj in hashes:
258         result.append(hash_obj.hexdigest())
259
260     return result
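# Usage sketch (hypothetical file name): hash the first megabyte of an image with
# two digests at once; results come back in the same order as the names given.
#   md5_hex, sha256_hex = calc_hashes("mic-image.raw", ("md5", "sha256"),
#                                     start=0, end=1024 * 1024)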
261
262 def get_md5sum(fpath):
263     return calc_hashes(fpath, ('md5', ))[0]
264
265 def get_sha1sum(fpath):
266     return calc_hashes(fpath, ('sha1', ))[0]
267
268 def get_sha256sum(fpath):
269     return calc_hashes(fpath, ('sha256', ))[0]
270
271 def normalize_ksfile(ksconf, release, arch):
272     '''
273     Return the name of a normalized ks file in which the macro variables
274     @BUILD_ID@ and @ARCH@ are replaced with real values.
275
276     The original ks file is returned if no such macro is used; otherwise
277     a temp file is created and returned, which will be deleted when the
278     program exits normally.
279     '''
280
281     if not release:
282         release = "latest"
283     if not arch or re.match(r'i.86', arch):
284         arch = "ia32"
285
286     with open(ksconf) as f:
287         ksc = f.read()
288
289     if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
290         return ksconf
291
292     msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
293     ksc = ksc.replace("@ARCH@", arch)
294     ksc = ksc.replace("@BUILD_ID@", release)
295
296     fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
297     os.write(fd, ksc)
298     os.close(fd)
299
300     msger.debug('normalized ks file:%s' % ksconf)
301
302     def remove_temp_ks():
303         try:
304             os.unlink(ksconf)
305         except OSError as err:
306             msger.warning('Failed to remove temp ks file:%s:%s' % (ksconf, err))
307
308     import atexit
309     atexit.register(remove_temp_ks)
310
311     return ksconf
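# Usage sketch (hypothetical values): if the ks file contains @BUILD_ID@ or @ARCH@,
# a substituted temp copy is returned and removed at exit; otherwise the original
# path comes back unchanged.
#   ksconf = normalize_ksfile("handset.ks", "tizen-2.1_20130408.5", "armv7l")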
312
313
314 def _check_mic_chroot(rootdir):
315     def _path(path):
316         return rootdir.rstrip('/') + path
317
318     release_files = map(_path, [ "/etc/moblin-release",
319                                  "/etc/meego-release",
320                                  "/etc/tizen-release"])
321
322     if not any(map(os.path.exists, release_files)):
323         msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)
324
325     if not glob.glob(rootdir + "/boot/vmlinuz-*"):
326         msger.warning("Failed to find kernel module under %s" % rootdir)
327
328     return
329
330 def selinux_check(arch, fstypes):
331     try:
332         getenforce = find_binary_path('getenforce')
333     except CreatorError:
334         return
335
336     selinux_status = runner.outs([getenforce])
337     if arch and arch.startswith("arm") and selinux_status == "Enforcing":
338         raise CreatorError("Can't create arm image if selinux is enabled, "
339                            "please run 'setenforce 0' to disable selinux")
340
341     use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
342     if use_btrfs and selinux_status == "Enforcing":
343         raise CreatorError("Can't create btrfs image if selinux is enabled,"
344                            " please run 'setenforce 0' to disable selinux")
345
346 def get_image_type(path):
347     def _get_extension_name(path):
348         match = re.search(r"(?<=\.)\w+$", path)
349         if match:
350             return match.group(0)
351         else:
352             return None
353
354     if os.path.isdir(path):
355         _check_mic_chroot(path)
356         return "fs"
357
358     maptab = {
359               "tar": "loop",
360               "raw":"raw",
361               "vmdk":"vmdk",
362               "vdi":"vdi",
363               "iso":"livecd",
364               "usbimg":"liveusb",
365              }
366
367     extension = _get_extension_name(path)
368     if extension in maptab:
369         return maptab[extension]
370
371     fd = open(path, "rb")
372     file_header = fd.read(1024)
373     fd.close()
374     vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
375     if file_header[0:len(vdi_flag)] == vdi_flag:
376         return maptab["vdi"]
377
378     # Check for an f2fs filesystem.
379     blkidcmd = find_binary_path("blkid")
380     out = runner.outs([blkidcmd, '-o', 'value', '-s', 'TYPE', path])
381     if out == "f2fs":
382         return "f2fsimg"
383     output = runner.outs(['file', path])
384     isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
385     usbimgptn = re.compile(r".*x86 boot sector.*active.*")
386     rawptn = re.compile(r".*x86 boot sector.*")
387     vmdkptn = re.compile(r".*VMware. disk image.*")
388     ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
389     ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
390     btrfsimgptn = re.compile(r".*BTRFS.*")
391     if isoptn.match(output):
392         return maptab["iso"]
393     elif usbimgptn.match(output):
394         return maptab["usbimg"]
395     elif rawptn.match(output):
396         return maptab["raw"]
397     elif vmdkptn.match(output):
398         return maptab["vmdk"]
399     elif ext3fsimgptn.match(output):
400         return "ext3fsimg"
401     elif ext4fsimgptn.match(output):
402         return "ext4fsimg"
403     elif btrfsimgptn.match(output):
404         return "btrfsimg"
405     else:
406         raise CreatorError("Cannot detect the type of image: %s" % path)
407
408
409 def get_file_size(filename):
410     """ Return size in MB unit """
411     cmd = ['du', "-s", "-b", "-B", "1M", filename]
412     rc, duOutput  = runner.runtool(cmd)
413     if rc != 0:
414         raise CreatorError("Failed to run: %s" % ' '.join(cmd))
415     size1 = int(duOutput.split()[0])
416
417     cmd = ['du', "-s", "-B", "1M", filename]
418     rc, duOutput = runner.runtool(cmd)
419     if rc != 0:
420         raise CreatorError("Failed to run: %s" % ' '.join(cmd))
421
422     size2 = int(duOutput.split()[0])
423     return max(size1, size2)
424
425
426 def get_filesystem_avail(fs):
427     vfstat = os.statvfs(fs)
428     return vfstat.f_bavail * vfstat.f_bsize
429
430 def convert_image(srcimg, srcfmt, dstimg, dstfmt):
431     #convert disk format
432     if dstfmt != "raw":
433         raise CreatorError("Invalid destination image format: %s" % dstfmt)
434     msger.debug("converting %s image to %s" % (srcimg, dstimg))
435     if srcfmt == "vmdk":
436         path = find_binary_path("qemu-img")
437         argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt,  dstimg]
438     elif srcfmt == "vdi":
439         path = find_binary_path("VBoxManage")
440         argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
441     else:
442         raise CreatorError("Invalid soure image format: %s" % srcfmt)
443
444     rc = runner.show(argv)
445     if rc == 0:
446         msger.debug("convert successful")
447     else:
448         raise CreatorError("Unable to convert disk to %s" % dstfmt)
449
450 def uncompress_squashfs(squashfsimg, outdir):
451     """Uncompress file system from squshfs image"""
452     unsquashfs = find_binary_path("unsquashfs")
453     args = [ unsquashfs, "-d", outdir, squashfsimg ]
454     rc = runner.show(args)
455     if (rc != 0):
456         raise SquashfsError("Failed to uncompress %s." % squashfsimg)
457
458 def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
459     """ FIXME: use the dir in mic.conf instead """
460
461     makedirs(dir)
462     return tempfile.mkdtemp(dir = dir, prefix = prefix)
463
464 def get_repostrs_from_ks(ks):
465     def _get_temp_reponame(baseurl):
466         md5obj = hashlib.md5(baseurl)
467         tmpreponame = "%s" % md5obj.hexdigest()
468         return tmpreponame
469
470     kickstart_repos = []
471
472     for repodata in ks.handler.repo.repoList:
473         repo = {}
474         for attr in ('name',
475                      'baseurl',
476                      'mirrorlist',
477                      'includepkgs', # val is list
478                      'excludepkgs', # val is list
479                      'cost',    # int
480                      'priority',# int
481                      'save',
482                      'proxy',
483                      'proxyuser',
484                      'proxypasswd',
486                      'debuginfo',
487                      'source',
488                      'gpgkey',
489                      'ssl_verify'):
490             if hasattr(repodata, attr) and getattr(repodata, attr):
491                 repo[attr] = getattr(repodata, attr)
492
493         if 'name' not in repo:
494             repo['name'] = _get_temp_reponame(repodata.baseurl)
495         if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
496             repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
497                                       getattr(repodata, 'user', None),
498                                       getattr(repodata, 'passwd', None))
499
500         kickstart_repos.append(repo)
501
502     return kickstart_repos
503
504 def _get_uncompressed_data_from_url(url, filename, proxies):
505     filename = myurlgrab(url.full, filename, proxies)
506     suffix = None
507     if filename.endswith(".gz"):
508         suffix = ".gz"
509         runner.quiet(['gunzip', "-f", filename])
510     elif filename.endswith(".bz2"):
511         suffix = ".bz2"
512         runner.quiet(['bunzip2', "-f", filename])
513     if suffix:
514         filename = filename.replace(suffix, "")
515     return filename
516
517 def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
518                             sumtype=None, checksum=None):
519     url = baseurl.join(filename)
520     filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
521     if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
522         filename = os.path.splitext(filename_tmp)[0]
523     else:
524         filename = filename_tmp
525     if sumtype and checksum and os.path.exists(filename):
526         try:
527             sumcmd = find_binary_path("%ssum" % sumtype)
528         except:
529             file_checksum = None
530         else:
531             file_checksum = runner.outs([sumcmd, filename]).split()[0]
532
533         if file_checksum and file_checksum == checksum:
534             return filename
535
536     return _get_uncompressed_data_from_url(url, filename_tmp, proxies)
537
538 def get_metadata_from_repos(repos, cachedir):
539     my_repo_metadata = []
540     for repo in repos:
541         reponame = repo.name
542         baseurl = repo.baseurl
543
544         if hasattr(repo, 'proxy'):
545             proxy = repo.proxy
546         else:
547             proxy = get_proxy_for(baseurl)
548
549         proxies = None
550         if proxy:
551             proxies = {str(baseurl.split(":")[0]): str(proxy)}
552
553         makedirs(os.path.join(cachedir, reponame))
554         url = baseurl.join("repodata/repomd.xml")
555         filename = os.path.join(cachedir, reponame, 'repomd.xml')
556         repomd = myurlgrab(url.full, filename, proxies)
557         try:
558             root = xmlparse(repomd)
559         except SyntaxError:
560             raise CreatorError("repomd.xml syntax error.")
561
562         ns = root.getroot().tag
563         ns = ns[0:ns.rindex("}")+1]
564
565         filepaths = {}
566         checksums = {}
567         sumtypes = {}
568
569         for elm in root.getiterator("%sdata" % ns):
570             if elm.attrib["type"] == "patterns":
571                 filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
572                 checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
573                 sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
574                 break
575
576         for elm in root.getiterator("%sdata" % ns):
577             #"group" type has no "open-checksum" filed, remove it.
578             if elm.attrib["type"] == "group_gz":
579                 filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
580                 checksums['comps'] = elm.find("%sopen-checksum" % ns).text
581                 sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
582                 break
583
584         primary_type = None
585         for elm in root.getiterator("%sdata" % ns):
586             if elm.attrib["type"] in ("primary_db", "primary"):
587                 primary_type = elm.attrib["type"]
588                 filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
589                 checksums['primary'] = elm.find("%sopen-checksum" % ns).text
590                 sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
591                 break
592
593         if not primary_type:
594             continue
595
596         for item in ("primary", "patterns", "comps"):
597             if item not in filepaths:
598                 filepaths[item] = None
599                 continue
600             if not filepaths[item]:
601                 continue
602             filepaths[item] = _get_metadata_from_repo(baseurl,
603                                                       proxies,
604                                                       cachedir,
605                                                       reponame,
606                                                       filepaths[item],
607                                                       sumtypes[item],
608                                                       checksums[item])
609
610         """ Get repo key """
611         try:
612             repokey = _get_metadata_from_repo(baseurl,
613                                               proxies,
614                                               cachedir,
615                                               reponame,
616                                               "repodata/repomd.xml.key")
617         except CreatorError:
618             repokey = None
619             msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))
620
621         my_repo_metadata.append({"name":reponame,
622                                  "baseurl":baseurl,
623                                  "repomd":repomd,
624                                  "primary":filepaths['primary'],
625                                  "cachedir":cachedir,
626                                  "proxies":proxies,
627                                  "patterns":filepaths['patterns'],
628                                  "comps":filepaths['comps'],
629                                  "repokey":repokey,
630                                  "priority":repo.priority})
631
632     return my_repo_metadata
633
634 def get_rpmver_in_repo(repometadata):
635     for repo in repometadata:
636         if repo["primary"].endswith(".xml"):
637             root = xmlparse(repo["primary"])
638             ns = root.getroot().tag
639             ns = ns[0:ns.rindex("}")+1]
640
641             versionlist = []
642             for elm in root.getiterator("%spackage" % ns):
643                 if elm.find("%sname" % ns).text == 'rpm':
644                     for node in elm.getchildren():
645                         if node.tag == "%sversion" % ns:
646                             versionlist.append(node.attrib['ver'])
647
648             if versionlist:
649                 return max(versionlist,
650                            key=lambda ver: map(int, ver.split('.')))
653
654         elif repo["primary"].endswith(".sqlite"):
655             con = sqlite.connect(repo["primary"])
656             for row in con.execute("select version from packages where "
657                                    "name=\"rpm\" ORDER by version DESC"):
658                 con.close()
659                 return row[0]
660
661     return None
662
663 def get_arch(repometadata):
664     archlist = []
665     for repo in repometadata:
666         if repo["primary"].endswith(".xml"):
667             root = xmlparse(repo["primary"])
668             ns = root.getroot().tag
669             ns = ns[0:ns.rindex("}")+1]
670             for elm in root.getiterator("%spackage" % ns):
671                 if elm.find("%sarch" % ns).text not in ("noarch", "src"):
672                     arch = elm.find("%sarch" % ns).text
673                     if arch not in archlist:
674                         archlist.append(arch)
675         elif repo["primary"].endswith(".sqlite"):
676             con = sqlite.connect(repo["primary"])
677             for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
678                 if row[0] not in archlist:
679                     archlist.append(row[0])
680
681             con.close()
682
683     uniq_arch = []
684     for i in range(len(archlist)):
685         if archlist[i] not in rpmmisc.archPolicies.keys():
686             continue
687         need_append = True
688         j = 0
689         while j < len(uniq_arch):
690             if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
691                 need_append = False
692                 break
693             if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
694                 if need_append:
695                     uniq_arch[j] = archlist[i]
696                     need_append = False
697                 else:
698                     uniq_arch.remove(uniq_arch[j])
699                     continue
700             j += 1
701         if need_append:
702             uniq_arch.append(archlist[i])
703
704     return uniq_arch, archlist
705
706 def get_package(pkg, repometadata, arch = None):
707     ver = ""
708     priority = 99
709     target_repo = None
710     if not arch:
711         arches = []
712     elif arch not in rpmmisc.archPolicies:
713         arches = [arch]
714     else:
715         arches = rpmmisc.archPolicies[arch].split(':')
716         arches.append('noarch')
717
718     for repo in repometadata:
719         if repo["primary"].endswith(".xml"):
720             root = xmlparse(repo["primary"])
721             ns = root.getroot().tag
722             ns = ns[0:ns.rindex("}")+1]
723             for elm in root.getiterator("%spackage" % ns):
724                 if elm.find("%sname" % ns).text == pkg and elm.find("%sarch" % ns).text in arches:
725                     if repo["priority"] != None:
726                         tmpprior = int(repo["priority"])
727                         if tmpprior < priority:
728                             priority = tmpprior
729                             location = elm.find("%slocation" % ns)
730                             pkgpath = "%s" % location.attrib['href']
731                             target_repo = repo
732                             break
733                         elif tmpprior > priority:
734                             break
735                     version = elm.find("%sversion" % ns)
736                     tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
737                     if tmpver > ver:
738                         ver = tmpver
739                         location = elm.find("%slocation" % ns)
740                         pkgpath = "%s" % location.attrib['href']
741                         target_repo = repo
742                     break
743         if repo["primary"].endswith(".sqlite"):
744             con = sqlite.connect(repo["primary"])
745             if arch:
746                 sql = 'select version, release, location_href from packages ' \
747                       'where name = "%s" and arch IN ("%s")' % \
748                       (pkg, '","'.join(arches))
749                 for row in con.execute(sql):
750                     tmpver = "%s-%s" % (row[0], row[1])
751                     if tmpver > ver:
752                         ver = tmpver
753                         pkgpath = "%s" % row[2]
754                         target_repo = repo
755                     break
756             else:
757                 sql = 'select version, release, location_href from packages ' \
758                       'where name = "%s"' % pkg
759                 for row in con.execute(sql):
760                     tmpver = "%s-%s" % (row[0], row[1])
761                     if tmpver > ver:
762                         ver = tmpver
763                         pkgpath = "%s" % row[2]
764                         target_repo = repo
765                     break
766             con.close()
767     if target_repo:
768         makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
769         url = target_repo["baseurl"].join(pkgpath)
770         filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
771         if os.path.exists(filename):
772             ret = rpmmisc.checkRpmIntegrity('rpm', filename)
773             if ret == 0:
774                 return filename
775
776             msger.warning("package %s is damaged: %s" %
777                           (os.path.basename(filename), filename))
778             os.unlink(filename)
779
780         pkg = myurlgrab(url.full, filename, target_repo["proxies"])
781         return pkg
782     else:
783         return None
784
785 def get_source_name(pkg, repometadata):
786
787     def get_bin_name(pkg):
788         m = RPM_RE.match(pkg)
789         if m:
790             return m.group(1)
791         return None
792
793     def get_src_name(srpm):
794         m = SRPM_RE.match(srpm)
795         if m:
796             return m.group(1)
797         return None
798
799     ver = ""
800     target_repo = None
801
802     pkg_name = get_bin_name(pkg)
803     if not pkg_name:
804         return None
805
806     for repo in repometadata:
807         if repo["primary"].endswith(".xml"):
808             root = xmlparse(repo["primary"])
809             ns = root.getroot().tag
810             ns = ns[0:ns.rindex("}")+1]
811             for elm in root.getiterator("%spackage" % ns):
812                 if elm.find("%sname" % ns).text == pkg_name:
813                     if elm.find("%sarch" % ns).text != "src":
814                         version = elm.find("%sversion" % ns)
815                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
816                         if tmpver > ver:
817                             ver = tmpver
818                             fmt = elm.find("%sformat" % ns)
819                             if fmt is not None:
820                                 fns = fmt.getchildren()[0].tag
821                                 fns = fns[0:fns.rindex("}")+1]
822                                 pkgpath = fmt.find("%ssourcerpm" % fns).text
823                                 target_repo = repo
824                         break
825
826         if repo["primary"].endswith(".sqlite"):
827             con = sqlite.connect(repo["primary"])
828             for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
829                 tmpver = "%s-%s" % (row[0], row[1])
830                 if tmpver > ver:
831                     pkgpath = "%s" % row[2]
832                     target_repo = repo
833                 break
834             con.close()
835     if target_repo:
836         return get_src_name(pkgpath)
837     else:
838         return None
839
840 def get_pkglist_in_patterns(group, patterns):
841     found = False
842     pkglist = []
843     try:
844         root = xmlparse(patterns)
845     except SyntaxError:
846         raise SyntaxError("%s syntax error." % patterns)
847
848     for elm in list(root.getroot()):
849         ns = elm.tag
850         ns = ns[0:ns.rindex("}")+1]
851         name = elm.find("%sname" % ns)
852         summary = elm.find("%ssummary" % ns)
853         if name.text == group or summary.text == group:
854             found = True
855             break
856
857     if not found:
858         return pkglist
859
860     found = False
861     for requires in list(elm):
862         if requires.tag.endswith("requires"):
863             found = True
864             break
865
866     if not found:
867         return pkglist
868
869     for pkg in list(requires):
870         pkgname = pkg.attrib["name"]
871         if pkgname not in pkglist:
872             pkglist.append(pkgname)
873
874     return pkglist
875
876 def get_pkglist_in_comps(group, comps):
877     found = False
878     pkglist = []
879     try:
880         root = xmlparse(comps)
881     except SyntaxError:
882         raise SyntaxError("%s syntax error." % comps)
883
884     for elm in root.getiterator("group"):
885         id = elm.find("id")
886         name = elm.find("name")
887         if id.text == group or name.text == group:
888             packagelist = elm.find("packagelist")
889             found = True
890             break
891
892     if not found:
893         return pkglist
894
895     for require in elm.getiterator("packagereq"):
896         if require.tag.endswith("packagereq"):
897             pkgname = require.text
898             if pkgname not in pkglist:
899                 pkglist.append(pkgname)
900
901     return pkglist
902
903 def is_statically_linked(binary):
904     return ", statically linked, " in runner.outs(['file', binary])
905
906 def get_qemu_arm_binary(arch):
907     if arch == "aarch64":
908         node = "/proc/sys/fs/binfmt_misc/aarch64"
909         if os.path.exists("/usr/bin/qemu-arm64") and is_statically_linked("/usr/bin/qemu-arm64"):
910             arm_binary = "qemu-arm64"
911         elif os.path.exists("/usr/bin/qemu-aarch64") and is_statically_linked("/usr/bin/qemu-aarch64"):
912             arm_binary = "qemu-aarch64"
913         elif os.path.exists("/usr/bin/qemu-arm64-static"):
914             arm_binary = "qemu-arm64-static"
915         elif os.path.exists("/usr/bin/qemu-aarch64-static"):
916             arm_binary = "qemu-aarch64-static"
917         else:
918             raise CreatorError("Please install a statically-linked %s" % arm_binary)
919     elif arch == "mipsel":
920         node = "/proc/sys/fs/binfmt_misc/mipsel"
921         arm_binary = "qemu-mipsel"
922         if not os.path.exists("/usr/bin/%s" % arm_binary) or not is_statically_linked("/usr/bin/%s" % arm_binary):
923             arm_binary = "qemu-mipsel-static"
924         if not os.path.exists("/usr/bin/%s" % arm_binary):
925             raise CreatorError("Please install a statically-linked %s" % arm_binary)
926     else:
927         node = "/proc/sys/fs/binfmt_misc/arm"
928         arm_binary = "qemu-arm"
929         if not os.path.exists("/usr/bin/qemu-arm") or not is_statically_linked("/usr/bin/qemu-arm"):
930             arm_binary = "qemu-arm-static"
931         if not os.path.exists("/usr/bin/%s" % arm_binary):
932             raise CreatorError("Please install a statically-linked %s" % arm_binary)
933
934     return (arm_binary, node)
935
936 def setup_qemu_emulator(rootdir, arch):
937     qemu_emulators = []
938     # mount binfmt_misc if it doesn't exist
939     if not os.path.exists("/proc/sys/fs/binfmt_misc"):
940         modprobecmd = find_binary_path("modprobe")
941         runner.show([modprobecmd, "binfmt_misc"])
942     if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
943         mountcmd = find_binary_path("mount")
944         runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])
945
946     # qemu_emulator is a special case: we can't use find_binary_path here,
947     # because the qemu emulator must be a statically-linked executable file
948     arm_binary, node = get_qemu_arm_binary(arch)
949     qemu_emulator = "/usr/bin/%s" % arm_binary
950
951     if not os.path.exists(rootdir + "/usr/bin"):
952         makedirs(rootdir + "/usr/bin")
953     shutil.copy(qemu_emulator, rootdir + qemu_emulator)
954     qemu_emulators.append(qemu_emulator)
955
956     # disable selinux, since selinux would block the qemu emulator from running
957     if os.path.exists("/usr/sbin/setenforce"):
958         msger.info('Try to disable selinux')
959         runner.show(["/usr/sbin/setenforce", "0"])
960
961     # register the qemu emulator for interpreting executables of the target arch
962     if not os.path.exists(node):
963         if arch == "aarch64":
964             qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff:%s:\n" % qemu_emulator
965         elif arch == "mipsel":
966             qemu_arm_string = ":mipsel:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x08\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xfe\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff\\xff:%s:\n" % qemu_emulator
967         else:
968             qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
969
970         with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
971             fd.write(qemu_arm_string)
972     else:
973         flags = ""
974         interpreter = ""
975         with open(node, "r") as fd:
976             for line in fd.readlines():
977                 if line.startswith("flags:"):
978                     flags = line[len("flags:"):].strip()
979                 elif line.startswith("interpreter"):
980                     interpreter = line[len("interpreter"):].strip()
981
982         if flags == "P" and interpreter.endswith("-binfmt"):
983             # copy binfmt wrapper when preserve-argv[0] flag is enabled
984             shutil.copy(os.path.realpath(interpreter), rootdir + interpreter)
985             qemu_emulators.append(interpreter)
986         elif not flags and interpreter != qemu_emulator:
987             # create symlink as registered qemu emulator
988             os.symlink(qemu_emulator, rootdir + interpreter)
989             qemu_emulators.append(interpreter)
990
991     return qemu_emulators
992
993 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
994     def get_source_repometadata(repometadata):
995         src_repometadata=[]
996         for repo in repometadata:
997             if repo["name"].endswith("-source"):
998                 src_repometadata.append(repo)
999         if src_repometadata:
1000             return src_repometadata
1001         return None
1002
1003     def get_src_name(srpm):
1004         m = SRPM_RE.match(srpm)
1005         if m:
1006             return m.group(1)
1007         return None
1008
1009     src_repometadata = get_source_repometadata(repometadata)
1010
1011     if not src_repometadata:
1012         msger.warning("No source repo found")
1013         return None
1014
1015     src_pkgs = []
1016     lpkgs_dict = {}
1017     lpkgs_path = []
1018     for repo in src_repometadata:
1019         cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
1020         lpkgs_path += glob.glob(cachepath)
1021
1022     for lpkg in lpkgs_path:
1023         lpkg_name = get_src_name(os.path.basename(lpkg))
1024         lpkgs_dict[lpkg_name] = lpkg
1025     localpkgs = lpkgs_dict.keys()
1026
1027     cached_count = 0
1028     destdir = instroot+'/usr/src/SRPMS'
1029     if not os.path.exists(destdir):
1030         os.makedirs(destdir)
1031
1032     srcpkgset = set()
1033     for _pkg in pkgs:
1034         srcpkg_name = get_source_name(_pkg, repometadata)
1035         if not srcpkg_name:
1036             continue
1037         srcpkgset.add(srcpkg_name)
1038
1039     for pkg in list(srcpkgset):
1040         if pkg in localpkgs:
1041             cached_count += 1
1042             shutil.copy(lpkgs_dict[pkg], destdir)
1043             src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
1044         else:
1045             src_pkg = get_package(pkg, src_repometadata, 'src')
1046             if src_pkg:
1047                 shutil.copy(src_pkg, destdir)
1048                 src_pkgs.append(src_pkg)
1049     msger.info("%d source packages taken from cache" % cached_count)
1050
1051     return src_pkgs
1052
1053 def strip_end(text, suffix):
1054     if not text.endswith(suffix):
1055         return text
1056     return text[:-len(suffix)]
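# Illustrative (hypothetical file name): strip_end("image.tar.gz", ".gz") -> "image.tar";
# a non-matching suffix leaves the text untouched.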
1057
1058 def strip_archive_suffix(filename):
1059     for suffix in get_archive_suffixes():
1060         if filename.endswith(suffix):
1061             return filename[:-len(suffix)]
1063     msger.warning("Unsupported archive file format: %s" % filename)
1064     return None