Add sha1sum and sha256sum
[tools/mic.git] / mic / utils / misc.py
1 #!/usr/bin/python -tt
2 #
3 # Copyright (c) 2010, 2011 Intel Inc.
4 #
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the Free
7 # Software Foundation; version 2 of the License
8 #
9 # This program is distributed in the hope that it will be useful, but
10 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11 # or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 # for more details.
13 #
14 # You should have received a copy of the GNU General Public License along
15 # with this program; if not, write to the Free Software Foundation, Inc., 59
16 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
17
18 from __future__ import with_statement
19 import os
20 import sys
21 import time
22 import tempfile
23 import re
24 import shutil
25 import glob
26 import hashlib
27 import subprocess
28 import platform
29 import traceback
30
31
32 try:
33     import sqlite3 as sqlite
34 except ImportError:
35     import sqlite
36
37 try:
38     from xml.etree import cElementTree
39 except ImportError:
40     import cElementTree
41 xmlparse = cElementTree.parse
42
43 from mic import msger
44 from mic.utils.errors import CreatorError, SquashfsError
45 from mic.utils.fs_related import find_binary_path, makedirs
46 from mic.utils.grabber import myurlgrab
47 from mic.utils.proxy import get_proxy_for
48 from mic.utils import runner
49 from mic.utils import rpmmisc
50 from mic.utils.safeurl import SafeURL
51
52
53 RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
54 RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
55 SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")
56
57
58 def build_name(kscfg, release=None, prefix = None, suffix = None):
59     """Construct and return an image name string.
60
61     This is a utility function to help create sensible name and fslabel
62     strings. The name is constructed using the sans-prefix-and-extension
63     kickstart filename and the supplied prefix and suffix.
64
65     kscfg -- a path to a kickstart file
66     release -- image release; when given, the suffix is suppressed
67     prefix -- a prefix to prepend to the name; defaults to None, which causes
68               no prefix to be used
69     suffix -- a suffix to append to the name; defaults to None, which causes
70               a YYYYMMDDHHMM suffix to be used
71
72     Note, if maxlen is less than len(suffix), you get to keep both pieces.
73
74     """
75     name = os.path.basename(kscfg)
76     idx = name.rfind('.')
77     if idx >= 0:
78         name = name[:idx]
79
80     if release is not None:
81         suffix = ""
82     if prefix is None:
83         prefix = ""
84     if suffix is None:
85         suffix = time.strftime("%Y%m%d%H%M")
86
87     if name.startswith(prefix):
88         name = name[len(prefix):]
89
90     prefix = "%s-" % prefix if prefix else ""
91     suffix = "-%s" % suffix if suffix else ""
92
93     ret = prefix + name + suffix
94     return ret
95
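# Illustrative sketch (not part of the original module): how build_name
# composes an image name; the kickstart path and affixes below are
# hypothetical values chosen for the example.
#
#   build_name("/path/to/handset.ks", prefix="tizen", suffix="rc1")
#       -> "tizen-handset-rc1"
#   build_name("/path/to/handset.ks", release="2.0")
#       -> "handset"              # a release suppresses the suffix
#   build_name("/path/to/handset.ks")
#       -> "handset-YYYYMMDDHHMM" # timestamp suffix by default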
96 def get_distro():
97     """Detect linux distribution, support "meego"
98     """
99
100     support_dists = ('SuSE',
101                      'debian',
102                      'fedora',
103                      'redhat',
104                      'centos',
105                      'meego',
106                      'moblin',
107                      'tizen')
108     try:
109         (dist, ver, id) = platform.linux_distribution( \
110                               supported_dists = support_dists)
111     except:
112         (dist, ver, id) = platform.dist( \
113                               supported_dists = support_dists)
114
115     return (dist, ver, id)
116
117 def get_hostname():
118     """Get hostname
119     """
120     return platform.node()
121
122 def get_hostname_distro_str():
123     """Get composited string for current linux distribution
124     """
125     (dist, ver, id) = get_distro()
126     hostname = get_hostname()
127
128     if not dist:
129         return "%s (unknown Linux distribution)" % hostname
130     else:
131         distro_str = ' '.join(map(str.strip, (hostname, dist, ver, id)))
132         return distro_str.strip()
133
134 _LOOP_RULE_PTH = None
135
136 def hide_loopdev_presentation():
137     udev_rules = "80-prevent-loop-present.rules"
138     udev_rules_dir = [
139                        '/usr/lib/udev/rules.d/',
140                        '/lib/udev/rules.d/',
141                        '/etc/udev/rules.d/'
142                      ]
143
144     global _LOOP_RULE_PTH
145
146     for rdir in udev_rules_dir:
147         if os.path.exists(rdir):
148             _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)
149
150     if not _LOOP_RULE_PTH:
151         return
152
153     try:
154         with open(_LOOP_RULE_PTH, 'w') as wf:
155             wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')
156
157         runner.quiet('udevadm trigger')
158     except:
159         pass
160
161 def unhide_loopdev_presentation():
162     global _LOOP_RULE_PTH
163
164     if not _LOOP_RULE_PTH:
165         return
166
167     try:
168         os.unlink(_LOOP_RULE_PTH)
169         runner.quiet('udevadm trigger')
170     except:
171         pass
172
173 def extract_rpm(rpmfile, targetdir):
174     rpm2cpio = find_binary_path("rpm2cpio")
175     cpio = find_binary_path("cpio")
176
177     olddir = os.getcwd()
178     os.chdir(targetdir)
179
180     msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
181     p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
182     p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
183                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
184     p1.stdout.close()
185     (sout, serr) = p2.communicate()
186     msger.verbose(sout or serr)
187
188     os.chdir(olddir)
189
190 def human_size(size):
191     """Return human readable string for Bytes size
192     """
193
194     if size <= 0:
195         return "0M"
196     import math
197     measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
198     expo = int(math.log(size, 1024))
199     mant = float(size/math.pow(1024, expo))
200     return "{0:.1f}{1:s}".format(mant, measure[expo])
201
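# Illustrative sketch (not part of the original module): human_size picks the
# largest power-of-1024 unit that fits and keeps one decimal place.
#
#   human_size(756)          -> "756.0B"
#   human_size(2560)         -> "2.5K"
#   human_size(3 * 1024**2)  -> "3.0M"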
202 def get_block_size(file_obj):
203     """ Returns block size for file object 'file_obj'. Errors are indicated by
204     the 'IOError' exception. """
205
206     from fcntl import ioctl
207     import struct
208
209     # Get the block size of the host file-system for the image file by calling
210     # the FIGETBSZ ioctl (number 2).
211     binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
212     return struct.unpack('I', binary_data)[0]
213
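# Illustrative sketch (not part of the original module): the FIGETBSZ ioctl
# needs a real file descriptor, so pass an open file object; the path below
# is hypothetical.
#
#   with open("/var/tmp/test.img", "rb") as img:
#       bsize = get_block_size(img)   # typically 4096 on ext4 hosts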
214 def check_space_pre_cp(src, dst):
215     """Check whether disk space is enough before 'cp' like
216     operations, else exception will be raised.
217     """
218
219     srcsize  = get_file_size(src) * 1024 * 1024
220     freesize = get_filesystem_avail(dst)
221     if srcsize > freesize:
222         raise CreatorError("space on %s(%s) is not enough for about %s files"
223                            % (dst, human_size(freesize), human_size(srcsize)))
224
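# Illustrative sketch (not part of the original module): guard a copy of an
# image root into a target directory; both paths are hypothetical.
#
#   check_space_pre_cp("/var/tmp/mic-root", "/srv/images")
#   # raises CreatorError when /srv/images has less free space than the source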
225 def calc_hashes(file_path, hash_names, start = 0, end = None):
226     """ Calculate hashes for a file. The 'file_path' argument is the file
227     to calculate hash functions for, 'start' and 'end' are the starting and
228     ending file offset to calculate the has functions for. The 'hash_names'
229     argument is a list of hash names to calculate. Returns the the list
230     of calculated hash values in the hexadecimal form in the same order
231     as 'hash_names'.
232     """
233     if end is None:
234         end = os.path.getsize(file_path)
235
236     chunk_size = 65536
237     to_read = end - start
238     read = 0
239
240     hashes = []
241     for hash_name in hash_names:
242         hashes.append(hashlib.new(hash_name))
243
244     with open(file_path, "rb") as f:
245         f.seek(start)
246
247         while read < to_read:
248             if read + chunk_size > to_read:
249                 chunk_size = to_read - read
250             chunk = f.read(chunk_size)
251             for hash_obj in hashes:
252                 hash_obj.update(chunk)
253             read += chunk_size
254
255     result = []
256     for hash_obj in hashes:
257         result.append(hash_obj.hexdigest())
258
259     return result
260
261 def get_md5sum(fpath):
262     return calc_hashes(fpath, ('md5', ))[0]
263
264 def get_sha1sum(fpath):
265     return calc_hashes(fpath, ('sha1', ))[0]
266
267 def get_sha256sum(fpath):
268     return calc_hashes(fpath, ('sha256', ))[0]
269
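# Illustrative sketch (not part of the original module): the checksum helpers
# above all share calc_hashes, so a single read of the file can also produce
# several digests at once; the image path is hypothetical.
#
#   get_md5sum("/var/tmp/test.img")                      # hex md5 digest
#   get_sha256sum("/var/tmp/test.img")                   # hex sha256 digest
#   calc_hashes("/var/tmp/test.img", ("sha1", "sha256")) # both in one pass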
270 def normalize_ksfile(ksconf, release, arch):
271     '''
272     Return the name of a normalized ks file in which macro variables
273     @BUILD_ID@ and @ARCH@ are replaced with real values.
274
275     The original ks file is returned if no special macro is used, otherwise
276     a temp file is created and returned, which will be deleted when program
277     exits normally.
278     '''
279
280     if not release:
281         release = "latest"
282     if not arch or re.match(r'i.86', arch):
283         arch = "ia32"
284
285     with open(ksconf) as f:
286         ksc = f.read()
287
288     if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
289         return ksconf
290
291     msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
292     ksc = ksc.replace("@ARCH@", arch)
293     ksc = ksc.replace("@BUILD_ID@", release)
294
295     fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
296     os.write(fd, ksc)
297     os.close(fd)
298
299     msger.debug('normalized ks file:%s' % ksconf)
300
301     def remove_temp_ks():
302         try:
303             os.unlink(ksconf)
304         except OSError, err:
305             msger.warning('Failed to remove temp ks file:%s:%s' % (ksconf, err))
306
307     import atexit
308     atexit.register(remove_temp_ks)
309
310     return ksconf
311
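# Illustrative sketch (not part of the original module): a ks file containing
# "@BUILD_ID@" or "@ARCH@" is copied to a temp file with the macros expanded;
# the path and values below are hypothetical.
#
#   ks_path = normalize_ksfile("/path/to/handset.ks", "build-20120101", "armv7l")
#   # ks_path is a temp copy with @BUILD_ID@ -> "build-20120101" and
#   # @ARCH@ -> "armv7l"; the original path is returned if no macro is present.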
312
313 def _check_mic_chroot(rootdir):
314     def _path(path):
315         return rootdir.rstrip('/') + path
316
317     release_files = map(_path, [ "/etc/moblin-release",
318                                  "/etc/meego-release",
319                                  "/etc/tizen-release"])
320
321     if not any(map(os.path.exists, release_files)):
322         msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)
323
324     if not glob.glob(rootdir + "/boot/vmlinuz-*"):
325         msger.warning("Failed to find a kernel image under %s" % rootdir)
326
327     return
328
329 def selinux_check(arch, fstypes):
330     try:
331         getenforce = find_binary_path('getenforce')
332     except CreatorError:
333         return
334
335     selinux_status = runner.outs([getenforce])
336     if arch and arch.startswith("arm") and selinux_status == "Enforcing":
337         raise CreatorError("Can't create arm image if selinux is enabled, "
338                            "please run 'setenforce 0' to disable selinux")
339
340     use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
341     if use_btrfs and selinux_status == "Enforcing":
342         raise CreatorError("Can't create btrfs image if selinux is enabled,"
343                            " please run 'setenforce 0' to disable selinux")
344
345 def get_image_type(path):
346     def _get_extension_name(path):
347         match = re.search(r"(?<=\.)\w+$", path)
348         if match:
349             return match.group(0)
350         else:
351             return None
352
353     if os.path.isdir(path):
354         _check_mic_chroot(path)
355         return "fs"
356
357     maptab = {
358               "tar": "loop",
359               "raw":"raw",
360               "vmdk":"vmdk",
361               "vdi":"vdi",
362               "iso":"livecd",
363               "usbimg":"liveusb",
364              }
365
366     extension = _get_extension_name(path)
367     if extension in maptab:
368         return maptab[extension]
369
370     fd = open(path, "rb")
371     file_header = fd.read(1024)
372     fd.close()
373     vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
374     if file_header[0:len(vdi_flag)] == vdi_flag:
375         return maptab["vdi"]
376
377     output = runner.outs(['file', path])
378     isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
379     usbimgptn = re.compile(r".*x86 boot sector.*active.*")
380     rawptn = re.compile(r".*x86 boot sector.*")
381     vmdkptn = re.compile(r".*VMware. disk image.*")
382     ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
383     ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
384     btrfsimgptn = re.compile(r".*BTRFS.*")
385     if isoptn.match(output):
386         return maptab["iso"]
387     elif usbimgptn.match(output):
388         return maptab["usbimg"]
389     elif rawptn.match(output):
390         return maptab["raw"]
391     elif vmdkptn.match(output):
392         return maptab["vmdk"]
393     elif ext3fsimgptn.match(output):
394         return "ext3fsimg"
395     elif ext4fsimgptn.match(output):
396         return "ext4fsimg"
397     elif btrfsimgptn.match(output):
398         return "btrfsimg"
399     else:
400         raise CreatorError("Cannot detect the type of image: %s" % path)
401
402
403 def get_file_size(filename):
404     """ Return size in MB unit """
405     cmd = ['du', "-s", "-b", "-B", "1M", filename]
406     rc, duOutput  = runner.runtool(cmd)
407     if rc != 0:
408         raise CreatorError("Failed to run: %s" % ' '.join(cmd))
409     size1 = int(duOutput.split()[0])
410
411     cmd = ['du', "-s", "-B", "1M", filename]
412     rc, duOutput = runner.runtool(cmd)
413     if rc != 0:
414         raise CreatorError("Failed to run: %s" % ' '.join(cmd))
415
416     size2 = int(duOutput.split()[0])
417     return max(size1, size2)
418
419
420 def get_filesystem_avail(fs):
421     vfstat = os.statvfs(fs)
422     return vfstat.f_bavail * vfstat.f_bsize
423
424 def convert_image(srcimg, srcfmt, dstimg, dstfmt):
425     #convert disk format
426     if dstfmt != "raw":
427         raise CreatorError("Invalid destination image format: %s" % dstfmt)
428     msger.debug("converting %s image to %s" % (srcimg, dstimg))
429     if srcfmt == "vmdk":
430         path = find_binary_path("qemu-img")
431         argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt,  dstimg]
432     elif srcfmt == "vdi":
433         path = find_binary_path("VBoxManage")
434         argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
435     else:
436         raise CreatorError("Invalid source image format: %s" % srcfmt)
437
438     rc = runner.show(argv)
439     if rc == 0:
440         msger.debug("convert successful")
441     if rc != 0:
442         raise CreatorError("Unable to convert disk to %s" % dstfmt)
443
444 def uncompress_squashfs(squashfsimg, outdir):
445     """Uncompress file system from squshfs image"""
446     unsquashfs = find_binary_path("unsquashfs")
447     args = [ unsquashfs, "-d", outdir, squashfsimg ]
448     rc = runner.show(args)
449     if (rc != 0):
450         raise SquashfsError("Failed to uncompress %s." % squashfsimg)
451
452 def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
453     """ FIXME: use the dir in mic.conf instead """
454
455     makedirs(dir)
456     return tempfile.mkdtemp(dir = dir, prefix = prefix)
457
458 def get_repostrs_from_ks(ks):
459     def _get_temp_reponame(baseurl):
460         md5obj = hashlib.md5(baseurl)
461         tmpreponame = "%s" % md5obj.hexdigest()
462         return tmpreponame
463
464     kickstart_repos = []
465
466     for repodata in ks.handler.repo.repoList:
467         repo = {}
468         for attr in ('name',
469                      'baseurl',
470                      'mirrorlist',
471                      'includepkgs', # val is list
472                      'excludepkgs', # val is list
473                      'cost',    # int
474                      'priority',# int
475                      'save',
476                      'proxy',
477                      'proxyuser',
478                      'proxypasswd',
480                      'debuginfo',
481                      'source',
482                      'gpgkey',
483                      'ssl_verify'):
484             if hasattr(repodata, attr) and getattr(repodata, attr):
485                 repo[attr] = getattr(repodata, attr)
486
487         if 'name' not in repo:
488             repo['name'] = _get_temp_reponame(repodata.baseurl)
489         if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
490             repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
491                                       getattr(repodata, 'user', None),
492                                       getattr(repodata, 'passwd', None))
493
494         kickstart_repos.append(repo)
495
496     return kickstart_repos
497
498 def _get_uncompressed_data_from_url(url, filename, proxies):
499     filename = myurlgrab(url.full, filename, proxies)
500     suffix = None
501     if filename.endswith(".gz"):
502         suffix = ".gz"
503         runner.quiet(['gunzip', "-f", filename])
504     elif filename.endswith(".bz2"):
505         suffix = ".bz2"
506         runner.quiet(['bunzip2', "-f", filename])
507     if suffix:
508         filename = filename.replace(suffix, "")
509     return filename
510
511 def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
512                             sumtype=None, checksum=None):
513     url = baseurl.join(filename)
514     filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
515     if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
516         filename = os.path.splitext(filename_tmp)[0]
517     else:
518         filename = filename_tmp
519     if sumtype and checksum and os.path.exists(filename):
520         try:
521             sumcmd = find_binary_path("%ssum" % sumtype)
522         except:
523             file_checksum = None
524         else:
525             file_checksum = runner.outs([sumcmd, filename]).split()[0]
526
527         if file_checksum and file_checksum == checksum:
528             return filename
529
530     return _get_uncompressed_data_from_url(url, filename_tmp, proxies)
531
532 def get_metadata_from_repos(repos, cachedir):
533     my_repo_metadata = []
534     for repo in repos:
535         reponame = repo.name
536         baseurl = repo.baseurl
537
538         if hasattr(repo, 'proxy'):
539             proxy = repo.proxy
540         else:
541             proxy = get_proxy_for(baseurl)
542
543         proxies = None
544         if proxy:
545             proxies = {str(baseurl.split(":")[0]): str(proxy)}
546
547         makedirs(os.path.join(cachedir, reponame))
548         url = baseurl.join("repodata/repomd.xml")
549         filename = os.path.join(cachedir, reponame, 'repomd.xml')
550         repomd = myurlgrab(url.full, filename, proxies)
551         try:
552             root = xmlparse(repomd)
553         except SyntaxError:
554             raise CreatorError("repomd.xml syntax error.")
555
556         ns = root.getroot().tag
557         ns = ns[0:ns.rindex("}")+1]
558
559         filepaths = {}
560         checksums = {}
561         sumtypes = {}
562
563         for elm in root.getiterator("%sdata" % ns):
564             if elm.attrib["type"] == "patterns":
565                 filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
566                 checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
567                 sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
568                 break
569
570         for elm in root.getiterator("%sdata" % ns):
571             if elm.attrib["type"] in ("group_gz", "group"):
572                 filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
573                 checksums['comps'] = elm.find("%sopen-checksum" % ns).text
574                 sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
575                 break
576
577         primary_type = None
578         for elm in root.getiterator("%sdata" % ns):
579             if elm.attrib["type"] in ("primary_db", "primary"):
580                 primary_type = elm.attrib["type"]
581                 filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
582                 checksums['primary'] = elm.find("%sopen-checksum" % ns).text
583                 sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
584                 break
585
586         if not primary_type:
587             continue
588
589         for item in ("primary", "patterns", "comps"):
590             if item not in filepaths:
591                 filepaths[item] = None
592                 continue
593             if not filepaths[item]:
594                 continue
595             filepaths[item] = _get_metadata_from_repo(baseurl,
596                                                       proxies,
597                                                       cachedir,
598                                                       reponame,
599                                                       filepaths[item],
600                                                       sumtypes[item],
601                                                       checksums[item])
602
603         """ Get repo key """
604         try:
605             repokey = _get_metadata_from_repo(baseurl,
606                                               proxies,
607                                               cachedir,
608                                               reponame,
609                                               "repodata/repomd.xml.key")
610         except CreatorError:
611             repokey = None
612             msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))
613
614         my_repo_metadata.append({"name":reponame,
615                                  "baseurl":baseurl,
616                                  "repomd":repomd,
617                                  "primary":filepaths['primary'],
618                                  "cachedir":cachedir,
619                                  "proxies":proxies,
620                                  "patterns":filepaths['patterns'],
621                                  "comps":filepaths['comps'],
622                                  "repokey":repokey})
623
624     return my_repo_metadata
625
626 def get_rpmver_in_repo(repometadata):
627     for repo in repometadata:
628         if repo["primary"].endswith(".xml"):
629             root = xmlparse(repo["primary"])
630             ns = root.getroot().tag
631             ns = ns[0:ns.rindex("}")+1]
632
633             versionlist = []
634             for elm in root.getiterator("%spackage" % ns):
635                 if elm.find("%sname" % ns).text == 'rpm':
636                     for node in elm.getchildren():
637                         if node.tag == "%sversion" % ns:
638                             versionlist.append(node.attrib['ver'])
639
640             if versionlist:
641                 return reversed(
642                          sorted(
643                            versionlist,
644                            key = lambda ver: map(int, ver.split('.')))).next()
645
646         elif repo["primary"].endswith(".sqlite"):
647             con = sqlite.connect(repo["primary"])
648             for row in con.execute("select version from packages where "
649                                    "name=\"rpm\" ORDER by version DESC"):
650                 con.close()
651                 return row[0]
652
653     return None
654
655 def get_arch(repometadata):
656     archlist = []
657     for repo in repometadata:
658         if repo["primary"].endswith(".xml"):
659             root = xmlparse(repo["primary"])
660             ns = root.getroot().tag
661             ns = ns[0:ns.rindex("}")+1]
662             for elm in root.getiterator("%spackage" % ns):
663                 if elm.find("%sarch" % ns).text not in ("noarch", "src"):
664                     arch = elm.find("%sarch" % ns).text
665                     if arch not in archlist:
666                         archlist.append(arch)
667         elif repo["primary"].endswith(".sqlite"):
668             con = sqlite.connect(repo["primary"])
669             for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
670                 if row[0] not in archlist:
671                     archlist.append(row[0])
672
673             con.close()
674
675     uniq_arch = []
676     for i in range(len(archlist)):
677         if archlist[i] not in rpmmisc.archPolicies.keys():
678             continue
679         need_append = True
680         j = 0
681         while j < len(uniq_arch):
682             if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
683                 need_append = False
684                 break
685             if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
686                 if need_append:
687                     uniq_arch[j] = archlist[i]
688                     need_append = False
689                 else:
690                     uniq_arch.remove(uniq_arch[j])
691                     continue
692             j += 1
693         if need_append:
694             uniq_arch.append(archlist[i])
695
696     return uniq_arch, archlist
697
698 def get_package(pkg, repometadata, arch = None):
699     ver = ""
700     target_repo = None
701     if not arch:
702         arches = []
703     elif arch not in rpmmisc.archPolicies:
704         arches = [arch]
705     else:
706         arches = rpmmisc.archPolicies[arch].split(':')
707         arches.append('noarch')
708
709     for repo in repometadata:
710         if repo["primary"].endswith(".xml"):
711             root = xmlparse(repo["primary"])
712             ns = root.getroot().tag
713             ns = ns[0:ns.rindex("}")+1]
714             for elm in root.getiterator("%spackage" % ns):
715                 if elm.find("%sname" % ns).text == pkg:
716                     if elm.find("%sarch" % ns).text in arches:
717                         version = elm.find("%sversion" % ns)
718                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
719                         if tmpver > ver:
720                             ver = tmpver
721                             location = elm.find("%slocation" % ns)
722                             pkgpath = "%s" % location.attrib['href']
723                             target_repo = repo
724                         break
725         if repo["primary"].endswith(".sqlite"):
726             con = sqlite.connect(repo["primary"])
727             if arch:
728                 sql = 'select version, release, location_href from packages ' \
729                       'where name = "%s" and arch IN ("%s")' % \
730                       (pkg, '","'.join(arches))
731                 for row in con.execute(sql):
732                     tmpver = "%s-%s" % (row[0], row[1])
733                     if tmpver > ver:
734                         ver = tmpver
735                         pkgpath = "%s" % row[2]
736                         target_repo = repo
737                     break
738             else:
739                 sql = 'select version, release, location_href from packages ' \
740                       'where name = "%s"' % pkg
741                 for row in con.execute(sql):
742                     tmpver = "%s-%s" % (row[0], row[1])
743                     if tmpver > ver:
744                         ver = tmpver
745                         pkgpath = "%s" % row[2]
746                         target_repo = repo
747                     break
748             con.close()
749     if target_repo:
750         makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
751         url = target_repo["baseurl"].join(pkgpath)
752         filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
753         if os.path.exists(filename):
754             ret = rpmmisc.checkRpmIntegrity('rpm', filename)
755             if ret == 0:
756                 return filename
757
758             msger.warning("package %s is damaged: %s" %
759                           (os.path.basename(filename), filename))
760             os.unlink(filename)
761
762         pkg = myurlgrab(url.full, filename, target_repo["proxies"])
763         return pkg
764     else:
765         return None
766
767 def get_source_name(pkg, repometadata):
768
769     def get_bin_name(pkg):
770         m = RPM_RE.match(pkg)
771         if m:
772             return m.group(1)
773         return None
774
775     def get_src_name(srpm):
776         m = SRPM_RE.match(srpm)
777         if m:
778             return m.group(1)
779         return None
780
781     ver = ""
782     target_repo = None
783
784     pkg_name = get_bin_name(pkg)
785     if not pkg_name:
786         return None
787
788     for repo in repometadata:
789         if repo["primary"].endswith(".xml"):
790             root = xmlparse(repo["primary"])
791             ns = root.getroot().tag
792             ns = ns[0:ns.rindex("}")+1]
793             for elm in root.getiterator("%spackage" % ns):
794                 if elm.find("%sname" % ns).text == pkg_name:
795                     if elm.find("%sarch" % ns).text != "src":
796                         version = elm.find("%sversion" % ns)
797                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
798                         if tmpver > ver:
799                             ver = tmpver
800                             fmt = elm.find("%sformat" % ns)
801                             if fmt:
802                                 fns = fmt.getchildren()[0].tag
803                                 fns = fns[0:fns.rindex("}")+1]
804                                 pkgpath = fmt.find("%ssourcerpm" % fns).text
805                                 target_repo = repo
806                         break
807
808         if repo["primary"].endswith(".sqlite"):
809             con = sqlite.connect(repo["primary"])
810             for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
811                 tmpver = "%s-%s" % (row[0], row[1])
812                 if tmpver > ver:
813                     pkgpath = "%s" % row[2]
814                     target_repo = repo
815                 break
816             con.close()
817     if target_repo:
818         return get_src_name(pkgpath)
819     else:
820         return None
821
822 def get_pkglist_in_patterns(group, patterns):
823     found = False
824     pkglist = []
825     try:
826         root = xmlparse(patterns)
827     except SyntaxError:
828         raise SyntaxError("%s syntax error." % patterns)
829
830     for elm in list(root.getroot()):
831         ns = elm.tag
832         ns = ns[0:ns.rindex("}")+1]
833         name = elm.find("%sname" % ns)
834         summary = elm.find("%ssummary" % ns)
835         if name.text == group or summary.text == group:
836             found = True
837             break
838
839     if not found:
840         return pkglist
841
842     found = False
843     for requires in list(elm):
844         if requires.tag.endswith("requires"):
845             found = True
846             break
847
848     if not found:
849         return pkglist
850
851     for pkg in list(requires):
852         pkgname = pkg.attrib["name"]
853         if pkgname not in pkglist:
854             pkglist.append(pkgname)
855
856     return pkglist
857
858 def get_pkglist_in_comps(group, comps):
859     found = False
860     pkglist = []
861     try:
862         root = xmlparse(comps)
863     except SyntaxError:
864         raise SyntaxError("%s syntax error." % comps)
865
866     for elm in root.getiterator("group"):
867         id = elm.find("id")
868         name = elm.find("name")
869         if id.text == group or name.text == group:
870             packagelist = elm.find("packagelist")
871             found = True
872             break
873
874     if not found:
875         return pkglist
876
877     for require in elm.getiterator("packagereq"):
878         if require.tag.endswith("packagereq"):
879             pkgname = require.text
880         if pkgname not in pkglist:
881             pkglist.append(pkgname)
882
883     return pkglist
884
885 def is_statically_linked(binary):
886     return ", statically linked, " in runner.outs(['file', binary])
887
888 def setup_qemu_emulator(rootdir, arch):
889     # mount binfmt_misc if it doesn't exist
890     if not os.path.exists("/proc/sys/fs/binfmt_misc"):
891         modprobecmd = find_binary_path("modprobe")
892         runner.show([modprobecmd, "binfmt_misc"])
893     if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
894         mountcmd = find_binary_path("mount")
895         runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])
896
897     # qemu_emulator is a special case: we can't use find_binary_path here,
898     # because the qemu emulator must be a statically-linked executable
899     if arch == "aarch64":
900         arm_binary = "qemu-arm64"
901         node = "/proc/sys/fs/binfmt_misc/aarch64"
902     else:
903         arm_binary = "qemu-arm"
904         node = "/proc/sys/fs/binfmt_misc/arm"
905
906     qemu_emulator = "/usr/bin/%s" % arm_binary
907     if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
908         qemu_emulator = "/usr/bin/%s-static" % arm_binary
909     if not os.path.exists(qemu_emulator):
910         raise CreatorError("Please install a statically-linked %s" % arm_binary)
911
912     if not os.path.exists(rootdir + "/usr/bin"):
913         makedirs(rootdir + "/usr/bin")
914     shutil.copy(qemu_emulator, rootdir + qemu_emulator)
915
916     # disable SELinux, since SELinux would prevent the qemu emulator from running
917     if os.path.exists("/usr/sbin/setenforce"):
918         msger.info('Try to disable selinux')
919         runner.show(["/usr/sbin/setenforce", "0"])
920
921     # unregister any previously registered handler so ours can take effect
922     if os.path.exists(node):
923         qemu_unregister_string = "-1\n"
924         with open(node, "w") as fd:
925             fd.write(qemu_unregister_string)
926
927     # register the qemu emulator so the kernel can run foreign-arch executables
928     if not os.path.exists(node):
929         if arch == "aarch64":
930             qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff:%s:\n" % qemu_emulator
931         else:
932             qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
933         with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
934             fd.write(qemu_arm_string)
935
936     return qemu_emulator
937
938 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
939     def get_source_repometadata(repometadata):
940         src_repometadata=[]
941         for repo in repometadata:
942             if repo["name"].endswith("-source"):
943                 src_repometadata.append(repo)
944         if src_repometadata:
945             return src_repometadata
946         return None
947
948     def get_src_name(srpm):
949         m = SRPM_RE.match(srpm)
950         if m:
951             return m.group(1)
952         return None
953
954     src_repometadata = get_source_repometadata(repometadata)
955
956     if not src_repometadata:
957         msger.warning("No source repo found")
958         return None
959
960     src_pkgs = []
961     lpkgs_dict = {}
962     lpkgs_path = []
963     for repo in src_repometadata:
964         cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
965         lpkgs_path += glob.glob(cachepath)
966
967     for lpkg in lpkgs_path:
968         lpkg_name = get_src_name(os.path.basename(lpkg))
969         lpkgs_dict[lpkg_name] = lpkg
970     localpkgs = lpkgs_dict.keys()
971
972     cached_count = 0
973     destdir = instroot+'/usr/src/SRPMS'
974     if not os.path.exists(destdir):
975         os.makedirs(destdir)
976
977     srcpkgset = set()
978     for _pkg in pkgs:
979         srcpkg_name = get_source_name(_pkg, repometadata)
980         if not srcpkg_name:
981             continue
982         srcpkgset.add(srcpkg_name)
983
984     for pkg in list(srcpkgset):
985         if pkg in localpkgs:
986             cached_count += 1
987             shutil.copy(lpkgs_dict[pkg], destdir)
988             src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
989         else:
990             src_pkg = get_package(pkg, src_repometadata, 'src')
991             if src_pkg:
992                 shutil.copy(src_pkg, destdir)
993                 src_pkgs.append(src_pkg)
994     msger.info("%d source packages found in local cache" % cached_count)
995
996     return src_pkgs
997
998 def strip_end(text, suffix):
999     if not text.endswith(suffix):
1000         return text
1001     return text[:-len(suffix)]
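# Illustrative sketch (not part of the original module): strip_end only
# removes the suffix when it is actually present.
#
#   strip_end("image.tar.gz", ".gz")  -> "image.tar"
#   strip_end("image.tar", ".gz")     -> "image.tar"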