Locally built mic-bootstrap using gbs cannot be used in mic.
[tools/mic.git] / mic / utils / misc.py
#!/usr/bin/python -tt
#
# Copyright (c) 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.

from __future__ import with_statement
import os
import sys
import time
import tempfile
import re
import shutil
import glob
import hashlib
import subprocess
import platform
import traceback


try:
    import sqlite3 as sqlite
except ImportError:
    import sqlite

try:
    from xml.etree import cElementTree
except ImportError:
    import cElementTree
xmlparse = cElementTree.parse

from mic import msger
from mic.utils.errors import CreatorError, SquashfsError
from mic.utils.fs_related import find_binary_path, makedirs
from mic.utils.grabber import myurlgrab
from mic.utils.proxy import get_proxy_for
from mic.utils import runner
from mic.utils import rpmmisc
from mic.utils.safeurl import SafeURL


RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")


def build_name(kscfg, release=None, prefix = None, suffix = None):
    """Construct and return an image name string.

    This is a utility function to help create sensible name and fslabel
    strings. The name is constructed from the kickstart filename (without
    its path and extension) plus the supplied prefix and suffix.

    kscfg -- a path to a kickstart file
    release -- image release string; if given, no suffix is appended
    prefix -- a prefix to prepend to the name; defaults to None, which causes
              no prefix to be used
    suffix -- a suffix to append to the name; defaults to None, which causes
              a YYYYMMDDHHMM suffix to be used
    """
    name = os.path.basename(kscfg)
    idx = name.rfind('.')
    if idx >= 0:
        name = name[:idx]

    if release is not None:
        suffix = ""
    if prefix is None:
        prefix = ""
    if suffix is None:
        suffix = time.strftime("%Y%m%d%H%M")

    if name.startswith(prefix):
        name = name[len(prefix):]

    prefix = "%s-" % prefix if prefix else ""
    suffix = "-%s" % suffix if suffix else ""

    ret = prefix + name + suffix
    return ret

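# Illustrative usage (a sketch; the kickstart path and timestamp below are
# hypothetical):
#   build_name('/path/to/handset.ks', prefix='tizen')
#       -> 'tizen-handset-201401151200'   # suffix defaults to YYYYMMDDHHMM
#   build_name('/path/to/handset.ks', release='1.0')
#       -> 'handset'                      # a release suppresses the suffix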
def get_distro():
    """Detect the host Linux distribution; returns (dist, version, id).
    """

    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    try:
        (dist, ver, id) = platform.linux_distribution(
                              supported_dists = support_dists)
    except:
        (dist, ver, id) = platform.dist(
                              supported_dists = support_dists)

    return (dist, ver, id)

def get_hostname():
    """Get hostname
    """
    return platform.node()

def get_hostname_distro_str():
    """Get a composite string describing the host and its Linux distribution
    """
    (dist, ver, id) = get_distro()
    hostname = get_hostname()

    if not dist:
        return "%s(Unknown Linux Distribution)" % hostname
    else:
        distro_str = ' '.join(map(str.strip, (hostname, dist, ver, id)))
        return distro_str.strip()

_LOOP_RULE_PTH = None

def hide_loopdev_presentation():
    udev_rules = "80-prevent-loop-present.rules"
    udev_rules_dir = [
                       '/usr/lib/udev/rules.d/',
                       '/lib/udev/rules.d/',
                       '/etc/udev/rules.d/'
                     ]

    global _LOOP_RULE_PTH

    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except:
        pass

def unhide_loopdev_presentation():
    global _LOOP_RULE_PTH

    if not _LOOP_RULE_PTH:
        return

    try:
        os.unlink(_LOOP_RULE_PTH)
        runner.quiet('udevadm trigger')
    except:
        pass

def extract_rpm(rpmfile, targetdir):
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)

    msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
    p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
    p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p1.stdout.close()
    (sout, serr) = p2.communicate()
    msger.verbose(sout or serr)

    os.chdir(olddir)

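# Illustrative usage (a sketch; the paths are hypothetical and rpm2cpio/cpio
# must be installed). The rpm payload is piped through "rpm2cpio | cpio -idv"
# inside targetdir:
#   extract_rpm('/tmp/foo-1.0-1.noarch.rpm', '/tmp/foo-extracted')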
def human_size(size):
    """Return a human readable string for a size given in bytes
    """

    if size <= 0:
        return "0M"
    import math
    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    expo = int(math.log(size, 1024))
    mant = float(size/math.pow(1024, expo))
    return "{0:.1f}{1:s}".format(mant, measure[expo])

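# Worked examples for the helper above:
#   human_size(0)            -> "0M"
#   human_size(1024)         -> "1.0K"
#   human_size(3 * 1024**2)  -> "3.0M"   # int(log(3*2**20, 1024)) == 2 -> 'M'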
def get_block_size(file_obj):
    """ Returns block size for file object 'file_obj'. Errors are indicated by
    the 'IOError' exception. """

    from fcntl import ioctl
    import struct

    # Get the block size of the host file-system for the image file by calling
    # the FIGETBSZ ioctl (number 2).
    binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
    return struct.unpack('I', binary_data)[0]

def check_space_pre_cp(src, dst):
    """Check whether there is enough free disk space before a 'cp'-like
    operation; raise CreatorError otherwise.
    """

    srcsize  = get_file_size(src) * 1024 * 1024
    freesize = get_filesystem_avail(dst)
    if srcsize > freesize:
        raise CreatorError("space on %s(%s) is not enough for about %s files"
                           % (dst, human_size(freesize), human_size(srcsize)))

def calc_hashes(file_path, hash_names, start = 0, end = None):
    """ Calculate hashes for a file. The 'file_path' argument is the file
    to calculate hash functions for, 'start' and 'end' are the starting and
    ending file offsets to calculate the hash functions for. The 'hash_names'
    argument is a list of hash names to calculate. Returns the list of
    calculated hash values in hexadecimal form, in the same order as
    'hash_names'.
    """
    if end is None:
        end = os.path.getsize(file_path)

    chunk_size = 65536
    to_read = end - start
    read = 0

    hashes = []
    for hash_name in hash_names:
        hashes.append(hashlib.new(hash_name))

    with open(file_path, "rb") as f:
        f.seek(start)

        while read < to_read:
            if read + chunk_size > to_read:
                chunk_size = to_read - read
            chunk = f.read(chunk_size)
            for hash_obj in hashes:
                hash_obj.update(chunk)
            read += chunk_size

    result = []
    for hash_obj in hashes:
        result.append(hash_obj.hexdigest())

    return result

def get_md5sum(fpath):
    return calc_hashes(fpath, ('md5', ))[0]

def get_sha1sum(fpath):
    return calc_hashes(fpath, ('sha1', ))[0]

def get_sha256sum(fpath):
    return calc_hashes(fpath, ('sha256', ))[0]

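# Illustrative usage (a sketch; the file path is hypothetical):
#   get_md5sum('/tmp/image.raw')                       # whole-file md5 digest
#   calc_hashes('/tmp/image.raw', ('md5', 'sha256'),
#               start=0, end=512)                      # digests of the first 512 bytes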
def normalize_ksfile(ksconf, release, arch):
    '''
    Return the name of a normalized ks file in which the macro variables
    @BUILD_ID@ and @ARCH@ are replaced with real values.

    The original ks file is returned if no special macro is used, otherwise
    a temp file is created and returned, which will be deleted when the
    program exits normally.
    '''

    if not release:
        release = "latest"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
        return ksconf

    msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
    ksc = ksc.replace("@ARCH@", arch)
    ksc = ksc.replace("@BUILD_ID@", release)

    fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
    os.write(fd, ksc)
    os.close(fd)

    msger.debug('normalized ks file: %s' % ksconf)

    def remove_temp_ks():
        try:
            os.unlink(ksconf)
        except OSError, err:
            msger.warning('Failed to remove temp ks file: %s: %s' % (ksconf, err))

    import atexit
    atexit.register(remove_temp_ks)

    return ksconf

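# Illustrative usage (a sketch; the ks path, release and arch are hypothetical).
# If handset.ks contains @BUILD_ID@ or @ARCH@, a substituted temp copy is
# returned and removed at exit; otherwise the original path comes back unchanged:
#   ksfile = normalize_ksfile('/path/to/handset.ks', 'tizen_20140101.1', 'armv7l')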

def _check_mic_chroot(rootdir):
    def _path(path):
        return rootdir.rstrip('/') + path

    release_files = map(_path, [ "/etc/moblin-release",
                                 "/etc/meego-release",
                                 "/etc/tizen-release"])

    if not any(map(os.path.exists, release_files)):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find a kernel image under %s" % rootdir)

    return

def selinux_check(arch, fstypes):
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        return

    selinux_status = runner.outs([getenforce])
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
    if use_btrfs and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")

def get_image_type(path):
    def _get_extension_name(path):
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    maptab = {
              "tar": "loop",
              "raw": "raw",
              "vmdk": "vmdk",
              "vdi": "vdi",
              "iso": "livecd",
              "usbimg": "liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    fd = open(path, "rb")
    file_header = fd.read(1024)
    fd.close()
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)

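# Illustrative usage (a sketch; the paths are hypothetical):
#   get_image_type('/srv/images/handset.usbimg')   # -> "liveusb" (by extension)
#   get_image_type('/srv/images/rootfs')           # directory -> "fs"
#   get_image_type('/srv/images/unknown.img')      # falls back to the `file` output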

def get_file_size(filename):
    """ Return size in MB unit """
    # Run du twice (apparent size with -b, then actual disk usage), both
    # reported in 1M blocks, and return the larger value as a safe estimate.
    cmd = ['du', "-s", "-b", "-B", "1M", filename]
    rc, duOutput = runner.runtool(cmd)
    if rc != 0:
        raise CreatorError("Failed to run: %s" % ' '.join(cmd))
    size1 = int(duOutput.split()[0])

    cmd = ['du', "-s", "-B", "1M", filename]
    rc, duOutput = runner.runtool(cmd)
    if rc != 0:
        raise CreatorError("Failed to run: %s" % ' '.join(cmd))

    size2 = int(duOutput.split()[0])
    return max(size1, size2)


def get_filesystem_avail(fs):
    vfstat = os.statvfs(fs)
    return vfstat.f_bavail * vfstat.f_bsize

def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    # convert disk format
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)

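# Illustrative usage (a sketch; the paths are hypothetical, and qemu-img or
# VBoxManage must be installed for vmdk or vdi sources respectively):
#   convert_image('/tmp/disk.vmdk', 'vmdk', '/tmp/disk.raw', 'raw')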
def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress the file system from a squashfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    args = [ unsquashfs, "-d", outdir, squashfsimg ]
    rc = runner.show(args)
    if rc != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)

def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """ FIXME: use the dir in mic.conf instead """

    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)

def get_repostrs_from_ks(ks):
    def _get_temp_reponame(baseurl):
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',     # int
                     'priority', # int
                     'save',
                     'proxy',
                     'proxyuser',
                     'proxypasswd',
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'ssl_verify'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)
        if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
            repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
                                      getattr(repodata, 'user', None),
                                      getattr(repodata, 'passwd', None))

        kickstart_repos.append(repo)

    return kickstart_repos

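# A repo dict produced above carries the kickstart repo settings (a sketch;
# the name and URL are hypothetical, and 'baseurl' is wrapped in SafeURL so
# credentials can travel with it):
#   {'name': 'base', 'baseurl': <SafeURL of http://example.com/repo>,
#    'priority': 99, 'ssl_verify': 'yes'}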
def _get_uncompressed_data_from_url(url, filename, proxies):
    filename = myurlgrab(url.full, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        filename = filename.replace(suffix, "")
    return filename

def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    url = baseurl.join(filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        filename = os.path.splitext(filename_tmp)[0]
    else:
        filename = filename_tmp
    if sumtype and checksum and os.path.exists(filename):
        try:
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, filename]).split()[0]

        if file_checksum and file_checksum == checksum:
            return filename

    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)

def get_metadata_from_repos(repos, cachedir):
    my_repo_metadata = []
    for repo in repos:
        reponame = repo.name
        baseurl = repo.baseurl

        if hasattr(repo, 'proxy'):
            proxy = repo.proxy
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            proxies = {str(baseurl.split(":")[0]): str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = baseurl.join("repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url.full, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        if not primary_type:
            continue

        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        # Get the repo key
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name": reponame,
                                 "baseurl": baseurl,
                                 "repomd": repomd,
                                 "primary": filepaths['primary'],
                                 "cachedir": cachedir,
                                 "proxies": proxies,
                                 "patterns": filepaths['patterns'],
                                 "comps": filepaths['comps'],
                                 "repokey": repokey,
                                 "priority": repo.priority})

    return my_repo_metadata

def get_rpmver_in_repo(repometadata):
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                return reversed(
                         sorted(
                           versionlist,
                           key = lambda ver: map(int, ver.split('.')))).next()

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
                con.close()
                return row[0]

    return None

def get_arch(repometadata):
    archlist = []
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

            con.close()

    uniq_arch = []
    for i in range(len(archlist)):
        if archlist[i] not in rpmmisc.archPolicies.keys():
            continue
        need_append = True
        j = 0
        while j < len(uniq_arch):
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                need_append = False
                break
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                if need_append:
                    uniq_arch[j] = archlist[i]
                    need_append = False
                else:
                    uniq_arch.remove(uniq_arch[j])
                    continue
            j += 1
        if need_append:
            uniq_arch.append(archlist[i])

    return uniq_arch, archlist

def get_package(pkg, repometadata, arch = None):
    ver = ""
    priority = 99
    target_repo = None
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        if repo["priority"] is not None:
                            tmpprior = int(repo["priority"])
                            if tmpprior < priority:
                                priority = tmpprior
                                location = elm.find("%slocation" % ns)
                                pkgpath = "%s" % location.attrib['href']
                                target_repo = repo
                                break
                            elif tmpprior > priority:
                                break
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if arch:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = target_repo["baseurl"].join(pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        if os.path.exists(filename):
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(url.full, filename, target_repo["proxies"])
        return pkg
    else:
        return None

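# Illustrative usage (a sketch; repometadata is the list returned by
# get_metadata_from_repos() above, and the package/arch names are hypothetical).
# Returns the local path of the downloaded (or cached) rpm, or None:
#   rpmpath = get_package('rpm', repometadata, arch='armv7l')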
def get_source_name(pkg, repometadata):

    def get_bin_name(pkg):
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt is not None:
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None

def get_pkglist_in_patterns(group, patterns):
    found = False
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    for elm in list(root.getroot()):
        ns = elm.tag
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        if name.text == group or summary.text == group:
            found = True
            break

    if not found:
        return pkglist

    found = False
    for requires in list(elm):
        if requires.tag.endswith("requires"):
            found = True
            break

    if not found:
        return pkglist

    for pkg in list(requires):
        pkgname = pkg.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist

def get_pkglist_in_comps(group, comps):
    found = False
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    for elm in root.getiterator("group"):
        id = elm.find("id")
        name = elm.find("name")
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
            found = True
            break

    if not found:
        return pkglist

    for require in elm.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
            if pkgname not in pkglist:
                pkglist.append(pkgname)

    return pkglist

def is_statically_linked(binary):
    return ", statically linked, " in runner.outs(['file', binary])

def setup_qemu_emulator(rootdir, arch):
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path;
    # the qemu emulator should be a statically-linked executable file
    if arch == "aarch64":
        node = "/proc/sys/fs/binfmt_misc/aarch64"
        if os.path.exists("/usr/bin/qemu-arm64") and is_statically_linked("/usr/bin/qemu-arm64"):
            arm_binary = "qemu-arm64"
        elif os.path.exists("/usr/bin/qemu-aarch64") and is_statically_linked("/usr/bin/qemu-aarch64"):
            arm_binary = "qemu-aarch64"
        elif os.path.exists("/usr/bin/qemu-arm64-static"):
            arm_binary = "qemu-arm64-static"
        elif os.path.exists("/usr/bin/qemu-aarch64-static"):
            arm_binary = "qemu-aarch64-static"
        else:
            raise CreatorError("Please install a statically-linked qemu-arm64 or qemu-aarch64")
    elif arch == "mipsel":
        node = "/proc/sys/fs/binfmt_misc/mipsel"
        arm_binary = "qemu-mipsel"
        if not os.path.exists("/usr/bin/%s" % arm_binary) or not is_statically_linked("/usr/bin/%s" % arm_binary):
            arm_binary = "qemu-mipsel-static"
        if not os.path.exists("/usr/bin/%s" % arm_binary):
            raise CreatorError("Please install a statically-linked %s" % arm_binary)
    else:
        node = "/proc/sys/fs/binfmt_misc/arm"
        arm_binary = "qemu-arm"
        if not os.path.exists("/usr/bin/qemu-arm") or not is_statically_linked("/usr/bin/qemu-arm"):
            arm_binary = "qemu-arm-static"
        if not os.path.exists("/usr/bin/%s" % arm_binary):
            raise CreatorError("Please install a statically-linked %s" % arm_binary)

    qemu_emulator = "/usr/bin/%s" % arm_binary

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, as selinux will block the qemu emulator from running
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    # register the qemu emulator so it interprets executables of the other arch
    if not os.path.exists(node):
        if arch == "aarch64":
            qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff:%s:\n" % qemu_emulator
        elif arch == "mipsel":
            qemu_arm_string = ":mipsel:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x08\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xfe\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff\\xff:%s:\n" % qemu_emulator
        else:
            qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator

        with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
            fd.write(qemu_arm_string)

    return qemu_emulator

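# Illustrative usage (a sketch; the chroot path is hypothetical and the call
# needs root privileges to write the binfmt_misc register file):
#   emulator = setup_qemu_emulator('/var/tmp/mic/install_root', 'arm')
# The returned emulator path (e.g. /usr/bin/qemu-arm-static) is also copied
# into the chroot so target-arch binaries can run during image creation.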
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    def get_source_repometadata(repometadata):
        src_repometadata = []
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages found in cache" % cached_count)

    return src_pkgs

def strip_end(text, suffix):
    if not text.endswith(suffix):
        return text
    return text[:-len(suffix)]
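
# Worked examples for the helper above:
#   strip_end('foo-1.0.src.rpm', '.src.rpm')  -> 'foo-1.0'
#   strip_end('foo-1.0.rpm', '.src.rpm')      -> 'foo-1.0.rpm'  (suffix absent)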