clean up unused code
[tools/mic.git] / mic / utils / misc.py
1 #!/usr/bin/python -tt
2 #
3 # Copyright (c) 2010, 2011 Intel Inc.
4 #
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the Free
7 # Software Foundation; version 2 of the License
8 #
9 # This program is distributed in the hope that it will be useful, but
10 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11 # or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 # for more details.
13 #
14 # You should have received a copy of the GNU General Public License along
15 # with this program; if not, write to the Free Software Foundation, Inc., 59
16 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
17
18 from __future__ import with_statement
19 import os
20 import sys
21 import time
22 import tempfile
23 import re
24 import shutil
25 import glob
26 import hashlib
27 import subprocess
28 import platform
29 import traceback
30
31
32 try:
33     import sqlite3 as sqlite
34 except ImportError:
35     import sqlite
36
37 try:
38     from xml.etree import cElementTree
39 except ImportError:
40     import cElementTree
41 xmlparse = cElementTree.parse
42
43 from mic import msger
44 from mic.utils.errors import CreatorError, SquashfsError
45 from mic.utils.fs_related import find_binary_path, makedirs
46 from mic.utils.grabber import myurlgrab
47 from mic.utils.proxy import get_proxy_for
48 from mic.utils import runner
49 from mic.utils import rpmmisc
50 from mic.utils.safeurl import SafeURL
51
52
53 RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
54 RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
55 SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")
56
57
58 def build_name(kscfg, release=None, prefix = None, suffix = None):
59     """Construct and return an image name string.
60
61     This is a utility function to help create sensible name and fslabel
62     strings. The name is constructed using the sans-prefix-and-extension
63     kickstart filename and the supplied prefix and suffix.
64
65     kscfg -- a path to a kickstart file
66     release --  a replacement to suffix for image release
67     prefix -- a prefix to prepend to the name; defaults to None, which causes
68               no prefix to be used
69     suffix -- a suffix to append to the name; defaults to None, which causes
70               a YYYYMMDDHHMM suffix to be used
71
72     Note: if maxlen is less than len(suffix), you get to keep both pieces.
73
74     """
75     name = os.path.basename(kscfg)
76     idx = name.rfind('.')
77     if idx >= 0:
78         name = name[:idx]
79
80     if release is not None:
81         suffix = ""
82     if prefix is None:
83         prefix = ""
84     if suffix is None:
85         suffix = time.strftime("%Y%m%d%H%M")
86
87     if name.startswith(prefix):
88         name = name[len(prefix):]
89
90     prefix = "%s-" % prefix if prefix else ""
91     suffix = "-%s" % suffix if suffix else ""
92
93     ret = prefix + name + suffix
94     return ret
95
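A minimal usage sketch (the kickstart path and prefix below are hypothetical); by default a timestamp suffix is appended, while passing a release suppresses it:

    name = build_name('/tmp/handset.ks', prefix='tizen')
    # -> 'tizen-handset-<YYYYMMDDHHMM>'
    name = build_name('/tmp/handset.ks', release='2.2')
    # -> 'handset'  (an explicit release replaces the timestamp suffix with an empty one)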
96 def get_distro():
97     """Detect the Linux distribution, including the extra
98     distributions listed in support_dists (e.g. meego, tizen)."""
99
100     support_dists = ('SuSE',
101                      'debian',
102                      'fedora',
103                      'redhat',
104                      'centos',
105                      'meego',
106                      'moblin',
107                      'tizen')
108     try:
109         (dist, ver, id) = platform.linux_distribution( \
110                               supported_dists = support_dists)
111     except:
112         (dist, ver, id) = platform.dist( \
113                               supported_dists = support_dists)
114
115     return (dist, ver, id)
116
117 def get_hostname():
118     """Get hostname
119     """
120     return platform.node()
121
122 def get_hostname_distro_str():
123     """Get a composite string describing the host and its Linux distribution
124     """
125     (dist, ver, id) = get_distro()
126     hostname = get_hostname()
127
128     if not dist:
129         return "%s(Unknown Linux Distribution)" % hostname
130     else:
131         distro_str = ' '.join(map(str.strip, (hostname, dist, ver, id)))
132         return distro_str.strip()
133
134 _LOOP_RULE_PTH = None
135
136 def hide_loopdev_presentation():
137     udev_rules = "80-prevent-loop-present.rules"
138     udev_rules_dir = [
139                        '/usr/lib/udev/rules.d/',
140                        '/lib/udev/rules.d/',
141                        '/etc/udev/rules.d/'
142                      ]
143
144     global _LOOP_RULE_PTH
145
146     for rdir in udev_rules_dir:
147         if os.path.exists(rdir):
148             _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)
149
150     if not _LOOP_RULE_PTH:
151         return
152
153     try:
154         with open(_LOOP_RULE_PTH, 'w') as wf:
155             wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')
156
157         runner.quiet('udevadm trigger')
158     except:
159         pass
160
161 def unhide_loopdev_presentation():
162     global _LOOP_RULE_PTH
163
164     if not _LOOP_RULE_PTH:
165         return
166
167     try:
168         os.unlink(_LOOP_RULE_PTH)
169         runner.quiet('udevadm trigger')
170     except:
171         pass
172
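These two helpers are meant to be used as a pair around operations that attach many loop devices, so desktop automounters do not present them to the user. A sketch of the intended usage (root privileges and a writable udev rules directory are assumed):

    hide_loopdev_presentation()
    try:
        pass  # attach and populate loop devices here
    finally:
        unhide_loopdev_presentation()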
173 def extract_rpm(rpmfile, targetdir):
174     rpm2cpio = find_binary_path("rpm2cpio")
175     cpio = find_binary_path("cpio")
176
177     olddir = os.getcwd()
178     os.chdir(targetdir)
179
180     msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
181     p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
182     p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
183                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
184     p1.stdout.close()
185     (sout, serr) = p2.communicate()
186     msger.verbose(sout or serr)
187
188     os.chdir(olddir)
189
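A short sketch of extract_rpm(); the rpm path is hypothetical, and the target directory must already exist because the function chdir()s into it:

    tmpdir = mkdtemp()  # mkdtemp() is defined later in this module
    extract_rpm('/tmp/foo-1.0-1.noarch.rpm', tmpdir)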
190 def human_size(size):
191     """Return a human-readable string for a size given in bytes
192     """
193
194     if size <= 0:
195         return "0M"
196     import math
197     measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
198     expo = int(math.log(size, 1024))
199     mant = float(size/math.pow(1024, expo))
200     return "{0:.1f}{1:s}".format(mant, measure[expo])
201
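A few sanity examples of the formatting (values chosen for illustration):

    human_size(0)              # -> '0M'
    human_size(1024)           # -> '1.0K'
    human_size(5 * 1024 ** 3)  # -> '5.0G'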
202 def get_block_size(file_obj):
203     """ Returns block size for file object 'file_obj'. Errors are indicated by
204     the 'IOError' exception. """
205
206     from fcntl import ioctl
207     import struct
208
209     # Get the block size of the host file-system for the image file by calling
210     # the FIGETBSZ ioctl (number 2).
211     binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
212     return struct.unpack('I', binary_data)[0]
213
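A sketch of get_block_size(); FIGETBSZ needs an open file object backed by a local filesystem, and the image path here is hypothetical:

    with open('/tmp/some.img', 'rb') as img:
        block_size = get_block_size(img)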
214 def check_space_pre_cp(src, dst):
215     """Check whether there is enough free disk space at 'dst' before a
216     'cp'-like operation; raise CreatorError if there is not.
217     """
218
219     srcsize  = get_file_size(src) * 1024 * 1024
220     freesize = get_filesystem_avail(dst)
221     if srcsize > freesize:
222         raise CreatorError("space on %s(%s) is not enough for about %s files"
223                            % (dst, human_size(freesize), human_size(srcsize)))
224
225 def calc_hashes(file_path, hash_names, start = 0, end = None):
226     """ Calculate hashes for a file. The 'file_path' argument is the file
227     to calculate hash functions for, 'start' and 'end' are the starting and
228     ending file offsets to calculate the hash functions for. The 'hash_names'
229     argument is a list of hash names to calculate. Returns the list
230     of calculated hash values in the hexadecimal form in the same order
231     as 'hash_names'.
232     """
233     if end is None:
234         end = os.path.getsize(file_path)
235
236     chunk_size = 65536
237     to_read = end - start
238     read = 0
239
240     hashes = []
241     for hash_name in hash_names:
242         hashes.append(hashlib.new(hash_name))
243
244     with open(file_path, "rb") as f:
245         f.seek(start)
246
247         while read < to_read:
248             if read + chunk_size > to_read:
249                 chunk_size = to_read - read
250             chunk = f.read(chunk_size)
251             for hash_obj in hashes:
252                 hash_obj.update(chunk)
253             read += chunk_size
254
255     result = []
256     for hash_obj in hashes:
257         result.append(hash_obj.hexdigest())
258
259     return result
260
261 def get_md5sum(fpath):
262     return calc_hashes(fpath, ('md5', ))[0]
263
264
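A usage sketch with a hypothetical file; any digest name accepted by hashlib.new() can be requested, and the results come back in the same order:

    md5sum, sha256sum = calc_hashes('/tmp/image.raw', ('md5', 'sha256'))
    md5sum = get_md5sum('/tmp/image.raw')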
265 def normalize_ksfile(ksconf, release, arch):
266     '''
267     Return the name of a normalized ks file in which macro variables
268     @BUILD_ID@ and @ARCH@ are replaced with real values.
269
270     The original ks file is returned if no special macro is used, otherwise
271     a temp file is created and returned, which will be deleted when program
272     exits normally.
273     '''
274
275     if not release:
276         release = "latest"
277     if not arch or re.match(r'i.86', arch):
278         arch = "ia32"
279
280     with open(ksconf) as f:
281         ksc = f.read()
282
283     if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
284         return ksconf
285
286     msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
287     ksc = ksc.replace("@ARCH@", arch)
288     ksc = ksc.replace("@BUILD_ID@", release)
289
290     fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
291     os.write(fd, ksc)
292     os.close(fd)
293
294     msger.debug('normalized ks file:%s' % ksconf)
295
296     def remove_temp_ks():
297         try:
298             os.unlink(ksconf)
299         except OSError, err:
300             msger.warning('Failed to remove temp ks file:%s:%s' % (ksconf, err))
301
302     import atexit
303     atexit.register(remove_temp_ks)
304
305     return ksconf
306
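A sketch of normalize_ksfile(); the kickstart path and release string are hypothetical. The returned path is either the original file (when no macros are present) or a temporary copy with @ARCH@/@BUILD_ID@ expanded:

    ksfile = normalize_ksfile('/tmp/image.ks', 'build-20140101.1', 'armv7l')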
307
308 def _check_mic_chroot(rootdir):
309     def _path(path):
310         return rootdir.rstrip('/') + path
311
312     release_files = map(_path, [ "/etc/moblin-release",
313                                  "/etc/meego-release",
314                                  "/etc/tizen-release"])
315
316     if not any(map(os.path.exists, release_files)):
317         msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)
318
319     if not glob.glob(rootdir + "/boot/vmlinuz-*"):
320         msger.warning("Failed to find a kernel image under %s" % rootdir)
321
322     return
323
324 def selinux_check(arch, fstypes):
325     try:
326         getenforce = find_binary_path('getenforce')
327     except CreatorError:
328         return
329
330     selinux_status = runner.outs([getenforce])
331     if arch and arch.startswith("arm") and selinux_status == "Enforcing":
332         raise CreatorError("Can't create arm image if selinux is enabled, "
333                            "please run 'setenforce 0' to disable selinux")
334
335     use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
336     if use_btrfs and selinux_status == "Enforcing":
337         raise CreatorError("Can't create btrfs image if selinux is enabled,"
338                            " please run 'setenforce 0' to disable selinux")
339
340 def get_image_type(path):
341     def _get_extension_name(path):
342         match = re.search("(?<=\.)\w+$", path)
343         if match:
344             return match.group(0)
345         else:
346             return None
347
348     if os.path.isdir(path):
349         _check_mic_chroot(path)
350         return "fs"
351
352     maptab = {
353               "tar": "loop",
354               "raw":"raw",
355               "vmdk":"vmdk",
356               "vdi":"vdi",
357               "iso":"livecd",
358               "usbimg":"liveusb",
359              }
360
361     extension = _get_extension_name(path)
362     if extension in maptab:
363         return maptab[extension]
364
365     fd = open(path, "rb")
366     file_header = fd.read(1024)
367     fd.close()
368     vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
369     if file_header[0:len(vdi_flag)] == vdi_flag:
370         return maptab["vdi"]
371
372     output = runner.outs(['file', path])
373     isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
374     usbimgptn = re.compile(r".*x86 boot sector.*active.*")
375     rawptn = re.compile(r".*x86 boot sector.*")
376     vmdkptn = re.compile(r".*VMware. disk image.*")
377     ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
378     ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
379     btrfsimgptn = re.compile(r".*BTRFS.*")
380     if isoptn.match(output):
381         return maptab["iso"]
382     elif usbimgptn.match(output):
383         return maptab["usbimg"]
384     elif rawptn.match(output):
385         return maptab["raw"]
386     elif vmdkptn.match(output):
387         return maptab["vmdk"]
388     elif ext3fsimgptn.match(output):
389         return "ext3fsimg"
390     elif ext4fsimgptn.match(output):
391         return "ext4fsimg"
392     elif btrfsimgptn.match(output):
393         return "btrfsimg"
394     else:
395         raise CreatorError("Cannot detect the type of image: %s" % path)
396
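Illustrative calls (paths are hypothetical); known extensions are mapped directly, directories are treated as chroots, and anything else falls back to sniffing the output of file(1):

    get_image_type('/tmp/live.iso')      # -> 'livecd'  (by extension)
    get_image_type('/tmp/rootfs-dir')    # -> 'fs'      (directory)
    get_image_type('/tmp/disk.unknown')  # -> decided from `file` output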
397
398 def get_file_size(filename):
399     """ Return size in MB unit """
400     cmd = ['du', "-s", "-b", "-B", "1M", filename]
401     rc, duOutput  = runner.runtool(cmd)
402     if rc != 0:
403         raise CreatorError("Failed to run: %s" % ' '.join(cmd))
404     size1 = int(duOutput.split()[0])
405
406     cmd = ['du', "-s", "-B", "1M", filename]
407     rc, duOutput = runner.runtool(cmd)
408     if rc != 0:
409         raise CreatorError("Failed to run: %s" % ' '.join(cmd))
410
411     size2 = int(duOutput.split()[0])
412     return max(size1, size2)
413
414
415 def get_filesystem_avail(fs):
416     vfstat = os.statvfs(fs)
417     return vfstat.f_bavail * vfstat.f_bsize
418
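Note the unit mismatch that check_space_pre_cp() above compensates for: get_file_size() reports megabytes while get_filesystem_avail() reports bytes. A small sketch with hypothetical paths:

    needed_mb  = get_file_size('/tmp/rootfs')
    free_bytes = get_filesystem_avail('/var/tmp')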
419 def convert_image(srcimg, srcfmt, dstimg, dstfmt):
420     #convert disk format
421     if dstfmt != "raw":
422         raise CreatorError("Invalid destination image format: %s" % dstfmt)
423     msger.debug("converting %s image to %s" % (srcimg, dstimg))
424     if srcfmt == "vmdk":
425         path = find_binary_path("qemu-img")
426         argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt,  dstimg]
427     elif srcfmt == "vdi":
428         path = find_binary_path("VBoxManage")
429         argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
430     else:
431         raise CreatorError("Invalid source image format: %s" % srcfmt)
432
433     rc = runner.show(argv)
434     if rc == 0:
435         msger.debug("convert successful")
436     else:
437         raise CreatorError("Unable to convert disk to %s" % dstfmt)
438
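A sketch of convert_image(); only 'raw' output is supported, and qemu-img (for vmdk) or VBoxManage (for vdi) must be installed. Paths are hypothetical:

    convert_image('/tmp/disk.vmdk', 'vmdk', '/tmp/disk.raw', 'raw')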
439 def uncompress_squashfs(squashfsimg, outdir):
440     """Uncompress the file system from a squashfs image"""
441     unsquashfs = find_binary_path("unsquashfs")
442     args = [ unsquashfs, "-d", outdir, squashfsimg ]
443     rc = runner.show(args)
444     if (rc != 0):
445         raise SquashfsError("Failed to uncompress %s." % squashfsimg)
446
447 def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
448     """ FIXME: use the dir in mic.conf instead """
449
450     makedirs(dir)
451     return tempfile.mkdtemp(dir = dir, prefix = prefix)
452
453 def get_repostrs_from_ks(ks):
454     def _get_temp_reponame(baseurl):
455         md5obj = hashlib.md5(baseurl)
456         tmpreponame = "%s" % md5obj.hexdigest()
457         return tmpreponame
458
459     kickstart_repos = []
460
461     for repodata in ks.handler.repo.repoList:
462         repo = {}
463         for attr in ('name',
464                      'baseurl',
465                      'mirrorlist',
466                      'includepkgs', # val is list
467                      'excludepkgs', # val is list
468                      'cost',    # int
469                      'priority',# int
470                      'save',
471                      'proxy',
472                      'proxyuser',
473                      'proxypasswd',
475                      'debuginfo',
476                      'source',
477                      'gpgkey',
478                      'ssl_verify'):
479             if hasattr(repodata, attr) and getattr(repodata, attr):
480                 repo[attr] = getattr(repodata, attr)
481
482         if 'name' not in repo:
483             repo['name'] = _get_temp_reponame(repodata.baseurl)
484         if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
485             repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
486                                       getattr(repodata, 'user', None),
487                                       getattr(repodata, 'passwd', None))
488
489         kickstart_repos.append(repo)
490
491     return kickstart_repos
492
493 def _get_uncompressed_data_from_url(url, filename, proxies):
494     filename = myurlgrab(url.full, filename, proxies)
495     suffix = None
496     if filename.endswith(".gz"):
497         suffix = ".gz"
498         runner.quiet(['gunzip', "-f", filename])
499     elif filename.endswith(".bz2"):
500         suffix = ".bz2"
501         runner.quiet(['bunzip2', "-f", filename])
502     if suffix:
503         filename = filename.replace(suffix, "")
504     return filename
505
506 def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
507                             sumtype=None, checksum=None):
508     url = baseurl.join(filename)
509     filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
510     if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
511         filename = os.path.splitext(filename_tmp)[0]
512     else:
513         filename = filename_tmp
514     if sumtype and checksum and os.path.exists(filename):
515         try:
516             sumcmd = find_binary_path("%ssum" % sumtype)
517         except:
518             file_checksum = None
519         else:
520             file_checksum = runner.outs([sumcmd, filename]).split()[0]
521
522         if file_checksum and file_checksum == checksum:
523             return filename
524
525     return _get_uncompressed_data_from_url(url,filename_tmp,proxies)
526
527 def get_metadata_from_repos(repos, cachedir):
528     my_repo_metadata = []
529     for repo in repos:
530         reponame = repo.name
531         baseurl = repo.baseurl
532
533         if hasattr(repo, 'proxy'):
534             proxy = repo.proxy
535         else:
536             proxy = get_proxy_for(baseurl)
537
538         proxies = None
539         if proxy:
540             proxies = {str(baseurl.split(":")[0]): str(proxy)}
541
542         makedirs(os.path.join(cachedir, reponame))
543         url = baseurl.join("repodata/repomd.xml")
544         filename = os.path.join(cachedir, reponame, 'repomd.xml')
545         repomd = myurlgrab(url.full, filename, proxies)
546         try:
547             root = xmlparse(repomd)
548         except SyntaxError:
549             raise CreatorError("repomd.xml syntax error.")
550
551         ns = root.getroot().tag
552         ns = ns[0:ns.rindex("}")+1]
553
554         filepaths = {}
555         checksums = {}
556         sumtypes = {}
557
558         for elm in root.getiterator("%sdata" % ns):
559             if elm.attrib["type"] == "patterns":
560                 filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
561                 checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
562                 sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
563                 break
564
565         for elm in root.getiterator("%sdata" % ns):
566             if elm.attrib["type"] in ("group_gz", "group"):
567                 filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
568                 checksums['comps'] = elm.find("%sopen-checksum" % ns).text
569                 sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
570                 break
571
572         primary_type = None
573         for elm in root.getiterator("%sdata" % ns):
574             if elm.attrib["type"] in ("primary_db", "primary"):
575                 primary_type = elm.attrib["type"]
576                 filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
577                 checksums['primary'] = elm.find("%sopen-checksum" % ns).text
578                 sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
579                 break
580
581         if not primary_type:
582             continue
583
584         for item in ("primary", "patterns", "comps"):
585             if item not in filepaths:
586                 filepaths[item] = None
587                 continue
588             if not filepaths[item]:
589                 continue
590             filepaths[item] = _get_metadata_from_repo(baseurl,
591                                                       proxies,
592                                                       cachedir,
593                                                       reponame,
594                                                       filepaths[item],
595                                                       sumtypes[item],
596                                                       checksums[item])
597
598         # Get repo key
599         try:
600             repokey = _get_metadata_from_repo(baseurl,
601                                               proxies,
602                                               cachedir,
603                                               reponame,
604                                               "repodata/repomd.xml.key")
605         except CreatorError:
606             repokey = None
607             msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))
608
609         my_repo_metadata.append({"name":reponame,
610                                  "baseurl":baseurl,
611                                  "repomd":repomd,
612                                  "primary":filepaths['primary'],
613                                  "cachedir":cachedir,
614                                  "proxies":proxies,
615                                  "patterns":filepaths['patterns'],
616                                  "comps":filepaths['comps'],
617                                  "repokey":repokey})
618
619     return my_repo_metadata
620
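A sketch of driving the metadata download; 'repos' is assumed to be a list of repository objects carrying name/baseurl/proxy attributes (as produced elsewhere in mic), and the cache directory is hypothetical:

    repomd_list = get_metadata_from_repos(repos, '/var/tmp/mic/cache')
    primary_files = [md['primary'] for md in repomd_list]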
621 def get_rpmver_in_repo(repometadata):
622     for repo in repometadata:
623         if repo["primary"].endswith(".xml"):
624             root = xmlparse(repo["primary"])
625             ns = root.getroot().tag
626             ns = ns[0:ns.rindex("}")+1]
627
628             versionlist = []
629             for elm in root.getiterator("%spackage" % ns):
630                 if elm.find("%sname" % ns).text == 'rpm':
631                     for node in elm.getchildren():
632                         if node.tag == "%sversion" % ns:
633                             versionlist.append(node.attrib['ver'])
634
635             if versionlist:
636                 return reversed(
637                          sorted(
638                            versionlist,
639                            key = lambda ver: map(int, ver.split('.')))).next()
640
641         elif repo["primary"].endswith(".sqlite"):
642             con = sqlite.connect(repo["primary"])
643             for row in con.execute("select version from packages where "
644                                    "name=\"rpm\" ORDER by version DESC"):
645                 con.close()
646                 return row[0]
647
648     return None
649
650 def get_arch(repometadata):
651     archlist = []
652     for repo in repometadata:
653         if repo["primary"].endswith(".xml"):
654             root = xmlparse(repo["primary"])
655             ns = root.getroot().tag
656             ns = ns[0:ns.rindex("}")+1]
657             for elm in root.getiterator("%spackage" % ns):
658                 if elm.find("%sarch" % ns).text not in ("noarch", "src"):
659                     arch = elm.find("%sarch" % ns).text
660                     if arch not in archlist:
661                         archlist.append(arch)
662         elif repo["primary"].endswith(".sqlite"):
663             con = sqlite.connect(repo["primary"])
664             for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
665                 if row[0] not in archlist:
666                     archlist.append(row[0])
667
668             con.close()
669
670     uniq_arch = []
671     for i in range(len(archlist)):
672         if archlist[i] not in rpmmisc.archPolicies.keys():
673             continue
674         need_append = True
675         j = 0
676         while j < len(uniq_arch):
677             if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
678                 need_append = False
679                 break
680             if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
681                 if need_append:
682                     uniq_arch[j] = archlist[i]
683                     need_append = False
684                 else:
685                     uniq_arch.remove(uniq_arch[j])
686                     continue
687             j += 1
688         if need_append:
689              uniq_arch.append(archlist[i])
690
691     return uniq_arch, archlist
692
693 def get_package(pkg, repometadata, arch = None):
694     ver = ""
695     target_repo = None
696     if not arch:
697         arches = []
698     elif arch not in rpmmisc.archPolicies:
699         arches = [arch]
700     else:
701         arches = rpmmisc.archPolicies[arch].split(':')
702         arches.append('noarch')
703
704     for repo in repometadata:
705         if repo["primary"].endswith(".xml"):
706             root = xmlparse(repo["primary"])
707             ns = root.getroot().tag
708             ns = ns[0:ns.rindex("}")+1]
709             for elm in root.getiterator("%spackage" % ns):
710                 if elm.find("%sname" % ns).text == pkg:
711                     if elm.find("%sarch" % ns).text in arches:
712                         version = elm.find("%sversion" % ns)
713                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
714                         if tmpver > ver:
715                             ver = tmpver
716                             location = elm.find("%slocation" % ns)
717                             pkgpath = "%s" % location.attrib['href']
718                             target_repo = repo
719                         break
720         if repo["primary"].endswith(".sqlite"):
721             con = sqlite.connect(repo["primary"])
722             if arch:
723                 sql = 'select version, release, location_href from packages ' \
724                       'where name = "%s" and arch IN ("%s")' % \
725                       (pkg, '","'.join(arches))
726                 for row in con.execute(sql):
727                     tmpver = "%s-%s" % (row[0], row[1])
728                     if tmpver > ver:
729                         ver = tmpver
730                         pkgpath = "%s" % row[2]
731                         target_repo = repo
732                     break
733             else:
734                 sql = 'select version, release, location_href from packages ' \
735                       'where name = "%s"' % pkg
736                 for row in con.execute(sql):
737                     tmpver = "%s-%s" % (row[0], row[1])
738                     if tmpver > ver:
739                         ver = tmpver
740                         pkgpath = "%s" % row[2]
741                         target_repo = repo
742                     break
743             con.close()
744     if target_repo:
745         makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
746         url = target_repo["baseurl"].join(pkgpath)
747         filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
748         if os.path.exists(filename):
749             ret = rpmmisc.checkRpmIntegrity('rpm', filename)
750             if ret == 0:
751                 return filename
752
753             msger.warning("package %s is damaged: %s" %
754                           (os.path.basename(filename), filename))
755             os.unlink(filename)
756
757         pkg = myurlgrab(url.full, filename, target_repo["proxies"])
758         return pkg
759     else:
760         return None
761
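A sketch of fetching a single binary rpm; 'repometadata' is assumed to come from get_metadata_from_repos() above, and the architecture is illustrative. The return value is a local file path, reused from the cache when the cached rpm passes the integrity check:

    rpm_path = get_package('rpm', repometadata, arch='armv7l')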
762 def get_source_name(pkg, repometadata):
763
764     def get_bin_name(pkg):
765         m = RPM_RE.match(pkg)
766         if m:
767             return m.group(1)
768         return None
769
770     def get_src_name(srpm):
771         m = SRPM_RE.match(srpm)
772         if m:
773             return m.group(1)
774         return None
775
776     ver = ""
777     target_repo = None
778
779     pkg_name = get_bin_name(pkg)
780     if not pkg_name:
781         return None
782
783     for repo in repometadata:
784         if repo["primary"].endswith(".xml"):
785             root = xmlparse(repo["primary"])
786             ns = root.getroot().tag
787             ns = ns[0:ns.rindex("}")+1]
788             for elm in root.getiterator("%spackage" % ns):
789                 if elm.find("%sname" % ns).text == pkg_name:
790                     if elm.find("%sarch" % ns).text != "src":
791                         version = elm.find("%sversion" % ns)
792                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
793                         if tmpver > ver:
794                             ver = tmpver
795                             fmt = elm.find("%sformat" % ns)
796                             if fmt:
797                                 fns = fmt.getchildren()[0].tag
798                                 fns = fns[0:fns.rindex("}")+1]
799                                 pkgpath = fmt.find("%ssourcerpm" % fns).text
800                                 target_repo = repo
801                         break
802
803         if repo["primary"].endswith(".sqlite"):
804             con = sqlite.connect(repo["primary"])
805             for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
806                 tmpver = "%s-%s" % (row[0], row[1])
807                 if tmpver > ver:
808                     pkgpath = "%s" % row[2]
809                     target_repo = repo
810                 break
811             con.close()
812     if target_repo:
813         return get_src_name(pkgpath)
814     else:
815         return None
816
817 def get_pkglist_in_patterns(group, patterns):
818     found = False
819     pkglist = []
820     try:
821         root = xmlparse(patterns)
822     except SyntaxError:
823         raise SyntaxError("%s syntax error." % patterns)
824
825     for elm in list(root.getroot()):
826         ns = elm.tag
827         ns = ns[0:ns.rindex("}")+1]
828         name = elm.find("%sname" % ns)
829         summary = elm.find("%ssummary" % ns)
830         if name.text == group or summary.text == group:
831             found = True
832             break
833
834     if not found:
835         return pkglist
836
837     found = False
838     for requires in list(elm):
839         if requires.tag.endswith("requires"):
840             found = True
841             break
842
843     if not found:
844         return pkglist
845
846     for pkg in list(requires):
847         pkgname = pkg.attrib["name"]
848         if pkgname not in pkglist:
849             pkglist.append(pkgname)
850
851     return pkglist
852
853 def get_pkglist_in_comps(group, comps):
854     found = False
855     pkglist = []
856     try:
857         root = xmlparse(comps)
858     except SyntaxError:
859         raise SyntaxError("%s syntax error." % comps)
860
861     for elm in root.getiterator("group"):
862         id = elm.find("id")
863         name = elm.find("name")
864         if id.text == group or name.text == group:
865             packagelist = elm.find("packagelist")
866             found = True
867             break
868
869     if not found:
870         return pkglist
871
872     for require in elm.getiterator("packagereq"):
873         if require.tag.endswith("packagereq"):
874             pkgname = require.text
875         if pkgname not in pkglist:
876             pkglist.append(pkgname)
877
878     return pkglist
879
880 def is_statically_linked(binary):
881     return ", statically linked, " in runner.outs(['file', binary])
882
883 def setup_qemu_emulator(rootdir, arch):
884     # mount binfmt_misc if it doesn't exist
885     if not os.path.exists("/proc/sys/fs/binfmt_misc"):
886         modprobecmd = find_binary_path("modprobe")
887         runner.show([modprobecmd, "binfmt_misc"])
888     if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
889         mountcmd = find_binary_path("mount")
890         runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])
891
892     # qemu_emulator is a special case, we can't use find_binary_path
893     # qemu emulator should be a statically-linked executable file
894     if arch == "aarch64":
895         arm_binary = "qemu-arm64"
896         node = "/proc/sys/fs/binfmt_misc/aarch64"
897     else:
898         arm_binary = "qemu-arm"
899         node = "/proc/sys/fs/binfmt_misc/arm"
900
901     qemu_emulator = "/usr/bin/%s" % arm_binary
902     if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
903         qemu_emulator = "/usr/bin/%s-static" % arm_binary
904     if not os.path.exists(qemu_emulator):
905         raise CreatorError("Please install a statically-linked %s" % arm_binary)
906
907     if not os.path.exists(rootdir + "/usr/bin"):
908         makedirs(rootdir + "/usr/bin")
909     shutil.copy(qemu_emulator, rootdir + qemu_emulator)
910
911     # disable selinux, selinux will block qemu emulator to run
912     if os.path.exists("/usr/sbin/setenforce"):
913         msger.info('Try to disable selinux')
914         runner.show(["/usr/sbin/setenforce", "0"])
915
916     # unregister it if it has been registered and is a dynamically-linked executable
917     if os.path.exists(node):
918         qemu_unregister_string = "-1\n"
919         with open(node, "w") as fd:
920             fd.write(qemu_unregister_string)
921
922     # register qemu emulator for interpreting other arch executable file
923     if not os.path.exists(node):
924         if arch == "aarch64":
925             qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
926         else:
927             qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
928         with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
929             fd.write(qemu_arm_string)
930
931     return qemu_emulator
932
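A sketch of preparing an ARM chroot for emulation; this needs root (for the binfmt_misc registration), a statically linked qemu-arm or qemu-arm-static binary, and the install root path here is hypothetical:

    emulator = setup_qemu_emulator('/var/tmp/mic/install_root', 'arm')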
933 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
934     def get_source_repometadata(repometadata):
935         src_repometadata=[]
936         for repo in repometadata:
937             if repo["name"].endswith("-source"):
938                 src_repometadata.append(repo)
939         if src_repometadata:
940             return src_repometadata
941         return None
942
943     def get_src_name(srpm):
944         m = SRPM_RE.match(srpm)
945         if m:
946             return m.group(1)
947         return None
948
949     src_repometadata = get_source_repometadata(repometadata)
950
951     if not src_repometadata:
952         msger.warning("No source repo found")
953         return None
954
955     src_pkgs = []
956     lpkgs_dict = {}
957     lpkgs_path = []
958     for repo in src_repometadata:
959         cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
960         lpkgs_path += glob.glob(cachepath)
961
962     for lpkg in lpkgs_path:
963         lpkg_name = get_src_name(os.path.basename(lpkg))
964         lpkgs_dict[lpkg_name] = lpkg
965     localpkgs = lpkgs_dict.keys()
966
967     cached_count = 0
968     destdir = instroot+'/usr/src/SRPMS'
969     if not os.path.exists(destdir):
970         os.makedirs(destdir)
971
972     srcpkgset = set()
973     for _pkg in pkgs:
974         srcpkg_name = get_source_name(_pkg, repometadata)
975         if not srcpkg_name:
976             continue
977         srcpkgset.add(srcpkg_name)
978
979     for pkg in list(srcpkgset):
980         if pkg in localpkgs:
981             cached_count += 1
982             shutil.copy(lpkgs_dict[pkg], destdir)
983             src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
984         else:
985             src_pkg = get_package(pkg, src_repometadata, 'src')
986             if src_pkg:
987                 shutil.copy(src_pkg, destdir)
988                 src_pkgs.append(src_pkg)
989     msger.info("%d source packages taken from cache" % cached_count)
990
991     return src_pkgs
992
993 def strip_end(text, suffix):
994     if not text.endswith(suffix):
995         return text
996     return text[:-len(suffix)]
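Quick examples of strip_end():

    strip_end('foo.tar.gz', '.gz')  # -> 'foo.tar'
    strip_end('foo.tar', '.gz')     # -> 'foo.tar' (unchanged)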