fix version comparison for bootstrap
[tools/mic.git] / mic / utils / misc.py
#!/usr/bin/python -tt
#
# Copyright (c) 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.

from __future__ import with_statement
import os
import sys
import tempfile
import re
import shutil
import glob
import hashlib
import subprocess
import platform
import rpmmisc

try:
    from hashlib import md5
except ImportError:
    from md5 import md5

try:
    import sqlite3 as sqlite
except ImportError:
    import sqlite

try:
    from xml.etree import cElementTree
except ImportError:
    import cElementTree
xmlparse = cElementTree.parse

from errors import *
from fs_related import *
from rpmmisc import myurlgrab
from proxy import get_proxy_for
import runner

from mic import msger

RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(ver_rel)s"
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")
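# RPM_RE/RPM_FMT describe binary packages as "name.arch ver-rel"; SRPM_RE
# splits a source rpm filename such as "foo-1.2.3-4.5.src.rpm" so that
# group(1) yields the package name (used by the get_src_name() helpers below).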

def get_distro():
    """Detect the running Linux distribution; supports MeeGo/Tizen variants.
    """

    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    try:
        (dist, ver, id) = platform.linux_distribution( \
                              supported_dists = support_dists)
    except:
        (dist, ver, id) = platform.dist( \
                              supported_dists = support_dists)

    return (dist, ver, id)

def get_distro_str():
    """Get a composited string for the current Linux distribution.
    """
    (dist, ver, id) = get_distro()

    if not dist:
        return 'Unknown Linux Distro'
    else:
        distro_str = ' '.join(map(str.strip, (dist, ver, id)))
        return distro_str.strip()

_LOOP_RULE_PTH = None
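# The udev rule written below hides loop devices from udisks
# (UDISKS_PRESENTATION_HIDE) while mic works on loop-mounted images;
# unhide_loopdev_presentation() removes the rule again.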
def hide_loopdev_presentation():
    global _LOOP_RULE_PTH

    udev_rules = "80-prevent-loop-present.rules"
    udev_rules_dir = [
                       '/usr/lib/udev/rules.d/',
                       '/lib/udev/rules.d/',
                       '/etc/udev/rules.d/'
                     ]

    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except:
        pass

def unhide_loopdev_presentation():
    global _LOOP_RULE_PTH

    if not _LOOP_RULE_PTH:
        return

    try:
        os.unlink(_LOOP_RULE_PTH)
        runner.quiet('udevadm trigger')
    except:
        pass

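# Unpack an rpm payload into targetdir by piping "rpm2cpio <rpm> | cpio -idv"
# inside that directory.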
def extract_rpm(rpmfile, targetdir):
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)

    msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
    p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
    p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (sout, serr) = p2.communicate()
    msger.verbose(sout or serr)

    os.chdir(olddir)

def compressing(fpath, method):
    comp_map = {
        "gz": "gzip",
        "bz2": "bzip2"
    }
    if method not in comp_map:
        raise CreatorError("Unsupported compress format: %s, valid values: %s"
                           % (method, ','.join(comp_map.keys())))
    cmd = find_binary_path(comp_map[method])
    rc = runner.show([cmd, "-f", fpath])
    if rc:
        raise CreatorError("Failed to %s file: %s" % (comp_map[method], fpath))

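# Create a tarball of `target` at `dstfile`; the extension decides the
# compression (.tar.gz/.tgz -> gzip, .tar.bz2/.tbz -> bzip2, .tar -> none).
# The archive is written uncompressed first, then compressed in place.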
def taring(dstfile, target):
    import tarfile
    basen, ext = os.path.splitext(dstfile)
    comp = {".tar": None,
            ".gz": "gz", # for .tar.gz
            ".bz2": "bz2", # for .tar.bz2
            ".tgz": "gz",
            ".tbz": "bz2"}[ext]

    # specify tarball file path
    if not comp:
        tarpath = dstfile
    elif basen.endswith(".tar"):
        tarpath = basen
    else:
        tarpath = basen + ".tar"
    wf = tarfile.open(tarpath, 'w')

    if os.path.isdir(target):
        for item in os.listdir(target):
            wf.add(os.path.join(target, item), item)
    else:
        wf.add(target, os.path.basename(target))
    wf.close()

    if comp:
        compressing(tarpath, comp)
        # a rename is needed when dstfile ends with ".tgz" or ".tbz"
        if not basen.endswith(".tar"):
            shutil.move("%s.%s" % (tarpath, comp), dstfile)

def ziping(dstfile, target):
    import zipfile
    wf = zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED)
    if os.path.isdir(target):
        for item in os.listdir(target):
            fpath = os.path.join(target, item)
            if not os.path.isfile(fpath):
                continue
            wf.write(fpath, item, zipfile.ZIP_DEFLATED)
    else:
        wf.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
    wf.close()

pack_formats = {
    ".tar": taring,
    ".tar.gz": taring,
    ".tar.bz2": taring,
    ".tgz": taring,
    ".tbz": taring,
    ".zip": ziping,
}

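# Dispatch to taring()/ziping() based on the destination extension, e.g.
# packing("/tmp/out.tar.gz", "/some/dir") tars and gzips the directory.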
def packing(dstfile, target):
    (base, ext) = os.path.splitext(dstfile)
    if ext in (".gz", ".bz2") and base.endswith(".tar"):
        ext = ".tar" + ext
    if ext not in pack_formats:
        raise CreatorError("Unsupported pack format: %s, valid values: %s"
                           % (ext, ','.join(pack_formats.keys())))
    func = pack_formats[ext]
    # func should be callable
    func(dstfile, target)

def human_size(size):
    """Return a human-readable string for a size given in bytes.
    """

    if size <= 0:
        return "0M"
    import math
    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    expo = int(math.log(size, 1024))
    mant = float(size/math.pow(1024, expo))
    return "{0:.1f}{1:s}".format(mant, measure[expo])

def check_space_pre_cp(src, dst):
    """Check that there is enough free space on dst before a 'cp'-like
    operation; raise CreatorError otherwise.
    """

    srcsize  = get_file_size(src) * 1024 * 1024
    freesize = get_filesystem_avail(dst)
    if srcsize > freesize:
        raise CreatorError("space on %s(%s) is not enough for about %s of files"
                           % (dst, human_size(freesize), human_size(srcsize)))

def get_md5sum(fpath):
    blksize = 65536 # should be optimized enough

    md5sum = md5()
    with open(fpath, 'rb') as f:
        while True:
            data = f.read(blksize)
            if not data:
                break
            md5sum.update(data)
    return md5sum.hexdigest()

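# Substitute the @ARCH@ and @BUILD_ID@ macros in a kickstart file.  When a
# substitution happens, the result is written to a temp file under /tmp which
# is removed at exit, and the path of that temp file is returned.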
def normalize_ksfile(ksconf, release, arch):
    def _clrtempks():
        try:
            os.unlink(ksconf)
        except:
            pass

    if not os.path.exists(ksconf):
        return

    if not release:
        release = "latest"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" in ksc or "@BUILD_ID@" in ksc:
        msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
        ksc = ksc.replace("@ARCH@", arch)
        ksc = ksc.replace("@BUILD_ID@", release)
        fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
        os.write(fd, ksc)
        os.close(fd)

        msger.debug('new ks path %s' % ksconf)

        import atexit
        atexit.register(_clrtempks)

    return ksconf

def _check_mic_chroot(rootdir):
    def _path(path):
        return rootdir.rstrip('/') + path

    release_files = map(_path, [ "/etc/moblin-release",
                                 "/etc/meego-release",
                                 "/etc/tizen-release"])

    if not any(map(os.path.exists, release_files)):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find kernel image under %s" % rootdir)

    return

def selinux_check(arch, fstypes):
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        return

    selinux_status = runner.outs([getenforce])
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
    if use_btrfs and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")

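# Guess the image type of `path`: directories are checked as chroots ("fs"),
# known file extensions are mapped directly, and anything else is sniffed via
# the VDI header and the output of file(1).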
def get_image_type(path):
    def _get_extension_name(path):
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    maptab = {
              "tar": "loop",
              "raw": "raw",
              "vmdk": "vmdk",
              "vdi": "vdi",
              "iso": "livecd",
              "usbimg": "liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    fd = open(path, "rb")
    file_header = fd.read(1024)
    fd.close()
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)

def get_file_size(file):
    """ Return size in MB unit """
    rc, duOutput  = runner.runtool(['du', "-s", "-b", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du' on %s" % file)

    size1 = int(duOutput.split()[0])
    rc, duOutput = runner.runtool(['du', "-s", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du' on %s" % file)

    size2 = int(duOutput.split()[0])
    if size1 > size2:
        return size1
    else:
        return size2

def get_filesystem_avail(fs):
    vfstat = os.statvfs(fs)
    return vfstat.f_bavail * vfstat.f_bsize

def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    # convert disk format
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)

def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress file system from squashfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    args = [ unsquashfs, "-d", outdir, squashfsimg ]
    rc = runner.show(args)
    if (rc != 0):
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)

def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """ FIXME: use the dir in mic.conf instead """

    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)

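# Collect the repo definitions from a parsed kickstart handler as a list of
# dicts; repos without a name get a temporary one derived from the md5 of
# their baseurl.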
def get_repostrs_from_ks(ks):
    def _get_temp_reponame(baseurl):
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',    # int
                     'priority',# int
                     'save',
                     'proxy',
                     'proxyuser',
                     'proxypasswd',
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'ssl_verify'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)

        kickstart_repos.append(repo)

    return kickstart_repos

def _get_uncompressed_data_from_url(url, filename, proxies):
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        filename = filename.replace(suffix, "")
    return filename

def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    url = os.path.join(baseurl, filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        filename = os.path.splitext(filename_tmp)[0]
    else:
        filename = filename_tmp
    if sumtype and checksum and os.path.exists(filename):
        try:
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, filename]).split()[0]

        if file_checksum and file_checksum == checksum:
            return filename

    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)

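# For every repo, download repodata/repomd.xml and pull out the primary,
# patterns and comps metadata (re-downloading only when the cached copy's
# checksum no longer matches), plus the repo key if one is published.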
def get_metadata_from_repos(repos, cachedir):
    my_repo_metadata = []
    for repo in repos:
        reponame = repo['name']
        baseurl  = repo['baseurl']

        if 'proxy' in repo:
            proxy = repo['proxy']
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            proxies = {str(baseurl.split(":")[0]): str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = os.path.join(baseurl, "repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        if not primary_type:
            continue

        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        # get repo key
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name": reponame,
                                 "baseurl": baseurl,
                                 "repomd": repomd,
                                 "primary": filepaths['primary'],
                                 "cachedir": cachedir,
                                 "proxies": proxies,
                                 "patterns": filepaths['patterns'],
                                 "comps": filepaths['comps'],
                                 "repokey": repokey})

    return my_repo_metadata

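# Return the newest version of the "rpm" package available in the given repo
# metadata; versions from the XML metadata are compared numerically, component
# by component, rather than as plain strings (needed for the bootstrap case).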
def get_rpmver_in_repo(repometadata):
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                return reversed(
                         sorted(
                           versionlist,
                           key = lambda ver: map(int, ver.split('.')))).next()

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
                con.close()
                return row[0]

    return None

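# Gather the architectures present in the repos and reduce them with
# rpmmisc.archPolicies so that only the most specific, mutually incompatible
# arches remain; returns (uniq_arch, archlist).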
def get_arch(repometadata):
    archlist = []
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

            con.close()

    uniq_arch = []
    for i in range(len(archlist)):
        if archlist[i] not in rpmmisc.archPolicies.keys():
            continue
        need_append = True
        j = 0
        while j < len(uniq_arch):
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                need_append = False
                break
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                if need_append:
                    uniq_arch[j] = archlist[i]
                    need_append = False
                else:
                    uniq_arch.remove(uniq_arch[j])
                    continue
            j += 1
        if need_append:
            uniq_arch.append(archlist[i])

    return uniq_arch, archlist

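# Locate a binary package in the repo metadata (primary XML or sqlite), keep
# the highest version-release found across repos (compared as strings), then
# download it into the repo cache unless an intact copy is already there, and
# return the local file path.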
def get_package(pkg, repometadata, arch = None):
    ver = ""
    target_repo = None
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if arch:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = os.path.join(target_repo["baseurl"], pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        if os.path.exists(filename):
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(str(url), filename, target_repo["proxies"])
        return pkg
    else:
        return None

def get_source_name(pkg, repometadata):

    def get_bin_name(pkg):
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt is not None:
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    ver = tmpver
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None

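# Expand a pattern group (matched by name or summary) from a patterns.xml
# file into the list of required package names.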
def get_pkglist_in_patterns(group, patterns):
    found = False
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    for elm in list(root.getroot()):
        ns = elm.tag
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        if name.text == group or summary.text == group:
            found = True
            break

    if not found:
        return pkglist

    found = False
    for requires in list(elm):
        if requires.tag.endswith("requires"):
            found = True
            break

    if not found:
        return pkglist

    for pkg in list(requires):
        pkgname = pkg.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist

def get_pkglist_in_comps(group, comps):
    found = False
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    for elm in root.getiterator("group"):
        id = elm.find("id")
        name = elm.find("name")
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
            found = True
            break

    if not found:
        return pkglist

    for require in elm.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
            if pkgname not in pkglist:
                pkglist.append(pkgname)

    return pkglist

def is_statically_linked(binary):
    return ", statically linked, " in runner.outs(['file', binary])

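# Prepare the chroot for running ARM binaries on an x86 host: make sure
# binfmt_misc is mounted, copy a statically linked qemu-arm into the chroot
# and register it as the binfmt interpreter for ARM ELF executables.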
def setup_qemu_emulator(rootdir, arch):
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")

    # qemu emulator version check
    armv7_list = [arch for arch in rpmmisc.archPolicies.keys() if arch.startswith('armv7')]
    if arch in armv7_list:  # need qemu (>=0.13.0)
        qemuout = runner.outs([qemu_emulator, "-h"])
        m = re.search(r"version\s*([.\d]+)", qemuout)
        if m:
            qemu_version = m.group(1)
            # compare numerically, not as strings ("0.9" > "0.13" as a string)
            if [int(x) for x in qemu_version.split('.') if x] < [0, 13]:
                raise CreatorError("Requires %s version >=0.13 for %s" % (qemu_emulator, arch))
        else:
            msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    node = "/proc/sys/fs/binfmt_misc/arm"
    if is_statically_linked(qemu_emulator) and os.path.exists(node):
        return qemu_emulator

    # unregister it if it has been registered and is a dynamically-linked executable
    if not is_statically_linked(qemu_emulator) and os.path.exists(node):
        qemu_unregister_string = "-1\n"
        fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
        fd.write(qemu_unregister_string)
        fd.close()

    # register qemu emulator for interpreting other arch executable file
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
        fd.write(qemu_arm_string)
        fd.close()

    return qemu_emulator

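# Download the source rpms for the given binary packages from the "-source"
# repos into <instroot>/usr/src/SRPMS, reusing anything already present in the
# local package cache.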
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    def get_source_repometadata(repometadata):
        src_repometadata = []
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot+'/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages taken from the local cache" % cached_count)

    return src_pkgs

def strip_end(text, suffix):
    if not text.endswith(suffix):
        return text
    return text[:-len(suffix)]