Correct empty prefix string producing a stray '-'
[tools/mic.git] / mic / utils / misc.py
1 #!/usr/bin/python -tt
2 #
3 # Copyright (c) 2010, 2011 Intel Inc.
4 #
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the Free
7 # Software Foundation; version 2 of the License
8 #
9 # This program is distributed in the hope that it will be useful, but
10 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11 # or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 # for more details.
13 #
14 # You should have received a copy of the GNU General Public License along
15 # with this program; if not, write to the Free Software Foundation, Inc., 59
16 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
17
18 import os
19 import sys
20 import time
21 import tempfile
22 import re
23 import shutil
24 import glob
25 import hashlib
26 import subprocess
27 import platform
28 import rpmmisc
29 import hashlib
30 import sqlite3 as sqlite
31
32 try:
33     import sqlite3 as sqlite
34 except ImportError:
35     import sqlite
36
37 try:
38     from xml.etree import cElementTree
39 except ImportError:
40     import cElementTree
41 xmlparse = cElementTree.parse
42
43 from errors import *
44 from fs_related import *
45 from grabber import myurlgrab
46 from proxy import get_proxy_for
47 import runner
48
49 from mic import msger
50
# Package-identifier parsing helpers: raw strings keep the regex escapes
# (\., \d) intact instead of relying on undefined string escapes.
RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")             # "name.arch ver-rel"
RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")  # "name-ver-rel.src.rpm"
54
def build_name(kscfg, release=None, prefix = None, suffix = None):
    """Construct and return an image name string.

    The name is built from the kickstart file's basename with its
    extension stripped, optionally wrapped in a prefix and suffix.

    kscfg -- a path to a kickstart file
    release --  a replacement to suffix for image release
    prefix -- a prefix to prepend to the name; defaults to None, which causes
              no prefix to be used
    suffix -- a suffix to append to the name; defaults to None, which causes
              a YYYYMMDDHHMM suffix to be used

    Note, if maxlen is less then the len(suffix), you get to keep both pieces.

    """
    base = os.path.basename(kscfg)
    dot = base.rfind('.')
    name = base[:dot] if dot >= 0 else base

    if prefix is None:
        prefix = ""
    if suffix is None:
        # default suffix is a build timestamp
        suffix = time.strftime("%Y%m%d%H%M")
    if release is not None:
        suffix = release

    # avoid doubling the prefix when the ks name already carries it
    if prefix and name.startswith(prefix):
        name = name[len(prefix):]

    head = prefix + "-" if prefix else ""
    tail = "-" + suffix if suffix else ""
    return head + name + tail
92
def get_distro():
    """Detect the linux distribution, support "meego".

    Returns a (dist, version, id) tuple; all three are empty strings
    when detection is impossible (platform.linux_distribution() and
    platform.dist() were removed in Python 3.8, and the original code
    raised AttributeError there).
    """
    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')

    dist = ver = dist_id = ''
    # try the preferred API first, then the legacy fallback
    for probe in ('linux_distribution', 'dist'):
        func = getattr(platform, probe, None)
        if func is None:
            continue
        try:
            (dist, ver, dist_id) = func(supported_dists=support_dists)
            break
        except Exception:
            continue

    return (dist, ver, dist_id)
113
def get_distro_str():
    """Get composited string for current linux distribution."""
    (dist, ver, dist_id) = get_distro()

    if not dist:
        return 'Unknown Linux Distro'

    parts = [part.strip() for part in (dist, ver, dist_id)]
    return ' '.join(parts).strip()
124
# Path of the udev rule installed by hide_loopdev_presentation(), so
# unhide_loopdev_presentation() can remove it again.
_LOOP_RULE_PTH = None
def hide_loopdev_presentation():
    """Install a udev rule that hides loop devices from desktop automounters.

    Best-effort: failures (missing permissions, udevadm absent) are
    silently ignored.
    """
    # fix: without this declaration the assignment below created a local,
    # the module global was never set (so unhide never worked) and the
    # "if not _LOOP_RULE_PTH" check raised UnboundLocalError when no
    # rules directory existed.
    global _LOOP_RULE_PTH

    udev_rules = "80-prevent-loop-present.rules"
    udev_rules_dir = [
                       '/usr/lib/udev/rules.d/',
                       '/lib/udev/rules.d/',
                       '/etc/udev/rules.d/'
                     ]

    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except Exception:
        # deliberate best-effort: hiding loop devices is cosmetic
        pass
148
def unhide_loopdev_presentation():
    """Remove the udev rule installed by hide_loopdev_presentation()."""
    rule = _LOOP_RULE_PTH
    if not rule:
        return

    try:
        os.unlink(rule)
        runner.quiet('udevadm trigger')
    except:
        # best-effort cleanup, mirror the install side
        pass
158
def extract_rpm(rpmfile, targetdir):
    """Extract the payload of an rpm package into targetdir.

    Runs the pipeline `rpm2cpio rpmfile | cpio -idv` with targetdir as
    the working directory.
    """
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)
    try:
        msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
        p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
        p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # let rpm2cpio receive SIGPIPE if cpio exits early
        p1.stdout.close()
        (sout, serr) = p2.communicate()
        p1.wait()  # reap rpm2cpio, avoid a zombie
        msger.verbose(sout or serr)
    finally:
        # fix: the previous cwd was leaked when extraction raised
        os.chdir(olddir)
174
def compressing(fpath, method):
    """Compress fpath in place with the given method ('gz' or 'bz2').

    Raises CreatorError for an unknown method or a failing compressor.
    """
    comp_map = {
        "gz": "gzip",
        "bz2": "bzip2"
    }
    if method not in comp_map:
        # fix: grammar of the user-visible message ("Unsupport" -> "Unsupported")
        raise CreatorError("Unsupported compress format: %s, valid values: %s"
                           % (method, ','.join(comp_map.keys())))
    cmd = find_binary_path(comp_map[method])
    rc = runner.show([cmd, "-f", fpath])
    if rc:
        raise CreatorError("Failed to %s file: %s" % (comp_map[method], fpath))
187
def taring(dstfile, target):
    """Pack target into the tarball dstfile.

    For a directory, its top-level entries are added (recursively, via
    tarfile); otherwise target itself is added under its basename.  The
    extension of dstfile selects the compression: .tar (none),
    .tar.gz/.tgz (gzip), .tar.bz2/.tbz (bzip2).
    """
    import tarfile
    basen, ext = os.path.splitext(dstfile)
    comp = {".tar": None,
            ".gz": "gz", # for .tar.gz
            ".bz2": "bz2", # for .tar.bz2
            ".tgz": "gz",
            ".tbz": "bz2"}[ext]

    # specify the intermediate (uncompressed) tarball file path
    if not comp:
        tarpath = dstfile
    elif basen.endswith(".tar"):
        tarpath = basen
    else:
        tarpath = basen + ".tar"

    # fix: close the archive even when adding a member raises
    wf = tarfile.open(tarpath, 'w')
    try:
        if os.path.isdir(target):
            for item in os.listdir(target):
                wf.add(os.path.join(target, item), item)
        else:
            wf.add(target, os.path.basename(target))
    finally:
        wf.close()

    if comp:
        compressing(tarpath, comp)
        # gzip/bzip2 appended ".gz"/".bz2"; for ".tgz"/".tbz" rename back
        if not basen.endswith(".tar"):
            shutil.move("%s.%s" % (tarpath, comp), dstfile)
218
def ziping(dstfile, target):
    """Pack target into the zip archive dstfile.

    For a directory only its top-level regular files are archived
    (subdirectories are deliberately skipped); otherwise target itself
    is stored under its basename.
    """
    import zipfile
    # fix: 'with' guarantees the archive is closed even if a write fails
    with zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED) as wf:
        if os.path.isdir(target):
            for item in os.listdir(target):
                fpath = os.path.join(target, item)
                if not os.path.isfile(fpath):
                    continue
                wf.write(fpath, item, zipfile.ZIP_DEFLATED)
        else:
            wf.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
231
# Map archive file extension -> packing backend; consulted by packing()
# to dispatch on the destination file name.
pack_formats = {
    ".tar": taring,
    ".tar.gz": taring,
    ".tar.bz2": taring,
    ".tgz": taring,
    ".tbz": taring,
    ".zip": ziping,
}
240
def packing(dstfile, target):
    """Pack target into dstfile, choosing the packer by file extension.

    Raises CreatorError when the extension is not a supported format.
    """
    (base, ext) = os.path.splitext(dstfile)
    # splitext only yields the final extension; rebuild ".tar.gz"/".tar.bz2"
    if ext in (".gz", ".bz2") and base.endswith(".tar"):
        ext = ".tar" + ext
    if ext not in pack_formats:
        # fix: grammar of the user-visible message ("Unsupport" -> "Unsupported")
        raise CreatorError("Unsupported pack format: %s, valid values: %s"
                           % (ext, ','.join(pack_formats.keys())))
    func = pack_formats[ext]
    # func should be callable
    func(dstfile, target)
251
def human_size(size):
    """Return human readable string for Bytes size."""
    if size <= 0:
        return "0M"

    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    # fix: repeated division avoids math.log() rounding glitches near
    # exact powers of 1024 and can never index past the last unit
    expo = 0
    mant = float(size)
    while mant >= 1024 and expo < len(measure) - 1:
        mant /= 1024.0
        expo += 1
    return "{0:.1f}{1:s}".format(mant, measure[expo])
263
def check_space_pre_cp(src, dst):
    """Check whether disk space is enough before 'cp' like
    operations, else exception will be raised.
    """
    needed = get_file_size(src) * 1024 * 1024   # du reports MB; convert to bytes
    available = get_filesystem_avail(dst)
    if needed > available:
        raise CreatorError("space on %s(%s) is not enough for about %s files"
                           % (dst, human_size(available), human_size(needed)))
274
def calc_hashes(file_path, hash_names, start = 0, end = None):
    """Calculate hashes over a byte range of a file.

    file_path  -- file to read
    hash_names -- iterable of hashlib algorithm names to compute
    start, end -- byte offsets delimiting the region; end defaults to EOF

    Returns the hex digests in the same order as hash_names.
    """
    if end is None:
        end = os.path.getsize(file_path)

    hashers = [hashlib.new(name) for name in hash_names]

    remaining = end - start
    with open(file_path, "rb") as f:
        f.seek(start)
        while remaining > 0:
            chunk = f.read(min(65536, remaining))
            if not chunk:
                break  # region extends past EOF; hash what we got
            for hasher in hashers:
                hasher.update(chunk)
            remaining -= len(chunk)

    return [hasher.hexdigest() for hasher in hashers]
310
def get_md5sum(fpath):
    """Return the hex md5 digest of the whole file at fpath."""
    return calc_hashes(fpath, ('md5', ))[0]
313
def normalize_ksfile(ksconf, release, arch):
    """Expand @BUILD_ID@/@ARCH@ macros in a kickstart file.

    When macros are present, a substituted temporary copy is written to
    /tmp (removed at interpreter exit) and its path returned; otherwise
    the original path is returned.  Returns None when ksconf is missing.
    """
    def _clrtempks():
        try:
            os.unlink(ksconf)
        except:
            pass

    if not os.path.exists(ksconf):
        return

    release = release or "latest"
    # any i?86 arch (or none) is normalized to "ia32"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
        return ksconf

    msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
    ksc = ksc.replace("@ARCH@", arch).replace("@BUILD_ID@", release)
    fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
    os.write(fd, ksc)
    os.close(fd)

    msger.debug('new ks path %s' % ksconf)

    import atexit
    atexit.register(_clrtempks)

    return ksconf
346
def _check_mic_chroot(rootdir):
    """Warn when rootdir does not look like a MeeGo/Tizen chroot."""
    root = rootdir.rstrip('/')
    release_files = [root + rel for rel in ("/etc/moblin-release",
                                            "/etc/meego-release",
                                            "/etc/tizen-release")]

    if not any(os.path.exists(path) for path in release_files):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find kernel module under %s" % rootdir)

    return
362
def selinux_check(arch, fstypes):
    """Refuse to build arm or btrfs images while SELinux is enforcing."""
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        # no getenforce binary: assume selinux is not present
        return

    enforcing = runner.outs([getenforce]) == "Enforcing"

    if enforcing and arch and arch.startswith("arm"):
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    if enforcing and 'btrfs' in fstypes:
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")
378
def get_image_type(path):
    """Detect the image type of path.

    A directory is validated as a chroot and reported as "fs".  For a
    file, the extension is tried first; otherwise the content is
    sniffed (VirtualBox VDI header, then `file` output).  Raises
    CreatorError when nothing matches.
    """
    def _get_extension_name(path):
        # trailing extension without the dot, or None
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    # extension -> image type shortcut table
    maptab = {
              "tar": "loop",
              "raw":"raw",
              "vmdk":"vmdk",
              "vdi":"vdi",
              "iso":"livecd",
              "usbimg":"liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    # fix: close the file even if read() raises
    with open(path, "rb") as fd:
        file_header = fd.read(1024)
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    # NOTE: order matters -- usbimgptn must be tried before the more
    # general rawptn since both match "x86 boot sector"
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)
435
def get_file_size(file):
    """ Return size in MB unit """
    du_cmd = ['du', "-s", "-b", "-B", "1M", file]
    rc, duOutput  = runner.runtool(du_cmd)
    if rc != 0:
        # fix: the old message interpolated an undefined name 'du',
        # raising NameError instead of the intended CreatorError
        raise CreatorError("Failed to run %s" % ' '.join(du_cmd))

    size1 = int(duOutput.split()[0])

    # apparent size and disk usage can differ (sparse files); report the max
    du_cmd = ['du', "-s", "-B", "1M", file]
    rc, duOutput = runner.runtool(du_cmd)
    if rc != 0:
        raise CreatorError("Failed to run %s" % ' '.join(du_cmd))

    size2 = int(duOutput.split()[0])
    return max(size1, size2)
452
def get_filesystem_avail(fs):
    """Return the free space (in bytes) of the filesystem containing fs."""
    stat = os.statvfs(fs)
    return stat.f_bavail * stat.f_bsize
456
def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    """Convert a vmdk/vdi disk image to a raw image.

    Raises CreatorError for unsupported formats or a failed conversion.
    """
    # convert disk format
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt,  dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        # fix: typo "soure" in the user-visible error message
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)
476
def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress file system from squshfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    rc = runner.show([unsquashfs, "-d", outdir, squashfsimg])
    if rc != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)
484
def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """Create dir (if needed) and return a fresh temp directory inside it.

    FIXME: use the dir in mic.conf instead
    """
    makedirs(dir)
    return tempfile.mkdtemp(prefix=prefix, dir=dir)
490
def get_repostrs_from_ks(ks):
    """Extract repo definitions from a parsed kickstart.

    ks -- kickstart handler object (ks.handler.repo.repoList)

    Returns a list of dicts, one per repo command, containing only the
    attributes that are set and truthy; a repo without a name gets one
    derived from the md5 of its baseurl.
    """
    def _get_temp_reponame(baseurl):
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',    # int
                     'priority',# int
                     'save',
                     'proxy',
                     'proxyuser',
                     'proxypasswd', # fix: was listed twice
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'ssl_verify'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)

        kickstart_repos.append(repo)

    return kickstart_repos
526
def _get_uncompressed_data_from_url(url, filename, proxies):
    """Download url to filename and gunzip/bunzip2 it when compressed.

    Returns the path of the (uncompressed) local file.
    """
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        # fix: strip the suffix from the end only; str.replace() removed
        # the first occurrence, mangling paths like "a.gzb.gz"
        filename = filename[:-len(suffix)]
    return filename
539
def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    """Fetch one repodata file into the cache, reusing a cached copy
    when its checksum still matches."""
    url = os.path.join(baseurl, filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame,
                                     os.path.basename(filename)))

    # the cached artifact is the uncompressed file
    base, ext = os.path.splitext(filename_tmp)
    local = base if ext in (".gz", ".bz2") else filename_tmp

    if sumtype and checksum and os.path.exists(local):
        try:
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, local]).split()[0]

        if file_checksum and file_checksum == checksum:
            return local

    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)
560
def get_metadata_from_repos(repos, cachedir):
    """Download and cache repomd metadata for each repo.

    repos    -- list of repo dicts (see get_repostrs_from_ks)
    cachedir -- local cache root; one subdirectory per repo name

    Returns a list of dicts carrying local paths for primary, patterns
    and comps metadata plus the repo key (None when unavailable).
    Repos whose repomd.xml has no primary data are skipped.
    """
    my_repo_metadata = []
    for repo in repos:
        reponame = repo['name']
        baseurl  = repo['baseurl']


        # an explicit per-repo proxy wins over the global proxy config
        if 'proxy' in repo:
            proxy = repo['proxy']
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
           proxies = {str(baseurl.split(":")[0]):str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = os.path.join(baseurl, "repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        # XML namespace prefix "{...}" shared by every element in repomd.xml
        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        # locate the patterns metadata entry (first match wins)
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # locate the comps (package group) metadata entry
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # locate the primary metadata entry (sqlite db or xml)
        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # a repo without primary metadata is unusable -- skip it entirely
        if not primary_type:
            continue

        # download each located metadata file (checksum-cached)
        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        """ Get repo key """
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            # missing repo key is non-fatal
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name":reponame,
                                 "baseurl":baseurl,
                                 "repomd":repomd,
                                 "primary":filepaths['primary'],
                                 "cachedir":cachedir,
                                 "proxies":proxies,
                                 "patterns":filepaths['patterns'],
                                 "comps":filepaths['comps'],
                                 "repokey":repokey})

    return my_repo_metadata
655
def get_rpmver_in_repo(repometadata):
    """Return the newest 'rpm' package version found in the repos.

    Handles both XML and sqlite primary metadata; returns None when no
    rpm package is found in any repo.
    """
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                # fix: max() replaces reversed(sorted(...)).next(), which
                # only worked on Python 2; tuple keys compare numerically
                return max(versionlist,
                           key=lambda ver: tuple(int(p) for p in ver.split('.')))

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
                con.close()
                return row[0]

    return None
684
def get_arch(repometadata):
    """Detect target architecture(s) from repo primary metadata.

    Returns (uniq_arch, arch_list): uniq_arch is a one-element list with
    the unified architecture when some repo resolves unambiguously,
    otherwise []; arch_list is the reduced arch list over all repos.
    """
    def uniqarch(archlist=[]):
        # Collapse archlist to the most specific arch(es).  An arch is
        # dropped when another kept arch's policy already covers it;
        # rpmmisc.archPolicies maps arch -> colon-separated compatible
        # arches.  NOTE: the mutable default [] is read-only here, but
        # callers should still pass an explicit list.
        uniq_arch = []
        for i in range(len(archlist)):
            if archlist[i] not in rpmmisc.archPolicies.keys():
                continue
            need_append = True
            j = 0
            while j < len(uniq_arch):
                if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                    # already covered by a kept arch -- skip it
                    need_append = False
                    break
                if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                    # the new arch supersedes a kept one: replace the
                    # first superseded entry, remove any further ones
                    if need_append:
                        uniq_arch[j] = archlist[i]
                        need_append = False
                    else:
                        uniq_arch.remove(uniq_arch[j])
                        continue  # index j now holds the next element
                j += 1
            if need_append:
                 uniq_arch.append(archlist[i])

        return uniq_arch


    ret_uniq_arch = []
    ret_arch_list = []
    for repo in repometadata:
        archlist = []
        if repo["primary"].endswith(".xml"):
            # collect distinct non-noarch/non-src arches from XML metadata
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            # collect distinct arches from sqlite metadata
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

            con.close()

        uniq_arch = uniqarch(archlist)
        # first repo that resolves to exactly one arch wins
        if not ret_uniq_arch and len(uniq_arch) == 1:
            ret_uniq_arch = uniq_arch
        ret_arch_list += uniq_arch

    ret_arch_list = uniqarch(ret_arch_list)
    return ret_uniq_arch, ret_arch_list
739
def get_package(pkg, repometadata, arch = None):
    """Locate and download a binary package from the repos.

    pkg          -- package name
    repometadata -- list of repo dicts (see get_metadata_from_repos)
    arch         -- target arch; expanded via rpmmisc.archPolicies to
                    all compatible arches plus 'noarch'

    Returns the local path of the downloaded rpm, a cached copy when it
    passes the integrity check, or None when the package is not found.
    """
    ver = ""
    target_repo = None
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        version = elm.find("%sversion" % ns)
                        # NOTE(review): "ver-rel" is compared as a plain
                        # string, which is not true rpm version ordering
                        # (e.g. "10" < "9") -- confirm before relying on it
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if arch:
                # NOTE: SQL is built by string interpolation; safe only
                # because pkg/arches come from trusted local metadata
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        # reuse the cached rpm when it passes the integrity check,
        # otherwise drop it and download a fresh copy
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = os.path.join(target_repo["baseurl"], pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        if os.path.exists(filename):
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(str(url), filename, target_repo["proxies"])
        return pkg
    else:
        return None
808
def get_source_name(pkg, repometadata):
    """Return the source package name for a binary package string.

    pkg -- binary package identifier in RPM_FMT form
           ("name.arch version-release")

    Returns the srpm's package name, or None when pkg cannot be parsed
    or no matching package is found in the repos.
    """

    def get_bin_name(pkg):
        # binary package name from "name.arch ver-rel"
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        # source package name from "name-ver-rel.src.rpm"
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        # NOTE(review): plain string comparison, not rpm
                        # version ordering -- confirm before relying on it
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt:
                                # the format element carries its own namespace
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            # NOTE(review): unlike the XML branch, 'ver' is never updated
            # here, so only the first row is ever considered -- verify
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None
863
def get_pkglist_in_patterns(group, patterns):
    """Return the package names required by pattern `group`.

    group    -- pattern name or summary text to look up
    patterns -- path to a (namespaced) patterns XML file
    Returns a de-duplicated list of package names in document order;
    empty list when the pattern or its requires section is absent.
    Raises SyntaxError when the file cannot be parsed.
    """
    pkglist = []
    try:
        tree = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    # Locate the pattern whose <name> or <summary> matches `group`.
    matched = None
    for pattern in list(tree.getroot()):
        ns = pattern.tag[:pattern.tag.rindex("}") + 1]
        name = pattern.find("%sname" % ns)
        summary = pattern.find("%ssummary" % ns)
        if group in (name.text, summary.text):
            matched = pattern
            break

    if matched is None:
        return pkglist

    # Locate the <requires> child of the matched pattern.
    requires = None
    for child in list(matched):
        if child.tag.endswith("requires"):
            requires = child
            break

    if requires is None:
        return pkglist

    # Collect each required package name once, preserving order.
    for entry in list(requires):
        pkgname = entry.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist
899
def get_pkglist_in_comps(group, comps):
    """Return the package names listed in comps group `group`.

    group -- group <id> or <name> text to look up
    comps -- path to a comps XML file
    Returns a de-duplicated list of <packagereq> texts in document order;
    empty list when no matching group is found.
    Raises SyntaxError when the file cannot be parsed.
    """
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    # Find the <group> whose <id> or <name> matches `group`.
    # iter() replaces the deprecated getiterator() (removed in Python 3.9).
    found = False
    for elm in root.iter("group"):
        group_id = elm.find("id")   # renamed: `id` shadowed the builtin
        name = elm.find("name")
        # Guard against malformed groups missing <id> or <name>.
        if (group_id is not None and group_id.text == group) or \
           (name is not None and name.text == group):
            found = True
            break

    if not found:
        return pkglist

    # Collect every <packagereq> under the matched group, de-duplicated
    # while preserving document order.  iter("packagereq") already filters
    # by tag, so no extra endswith() check is needed.
    for require in elm.iter("packagereq"):
        pkgname = require.text
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist
926
def is_statically_linked(binary):
    """Return True when file(1) reports `binary` as statically linked."""
    file_output = runner.outs(['file', binary])
    return ", statically linked, " in file_output
929
def setup_qemu_emulator(rootdir, arch):
    """Install a statically-linked qemu-arm into `rootdir` and register it
    with the kernel's binfmt_misc so ARM binaries run inside the chroot.

    rootdir -- chroot directory the emulator is copied into
    arch    -- target architecture string (e.g. "armv7l")
    Returns the in-chroot path of the emulator ("/usr/bin/qemu-arm-static").
    Raises CreatorError when no static qemu binary is found or when the
    installed qemu is too old for an armv7 target.
    """
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")

    # qemu emulator version check.
    # BUGFIX: use a distinct loop variable -- under Python 2 a list
    # comprehension's variable leaks into the enclosing scope, and the
    # original `[arch for arch in ...]` clobbered the `arch` parameter,
    # making the armv7 membership test below meaningless.
    armv7_list = [a for a in rpmmisc.archPolicies.keys() if a.startswith('armv7')]
    if arch in armv7_list:  # need qemu (>=0.13.0)
        qemuout = runner.outs([qemu_emulator, "-h"])
        m = re.search(r"version\s*([.\d]+)", qemuout)
        if m:
            qemu_version = m.group(1)
            # BUGFIX: compare numerically -- as plain strings "0.9" > "0.13",
            # so old qemu releases wrongly passed the check.
            version_tuple = tuple(int(p) for p in qemu_version.split(".") if p.isdigit())
            if version_tuple < (0, 13):
                raise CreatorError("Requires %s version >=0.13 for %s" % (qemu_emulator, arch))
        else:
            msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + "/usr/bin/qemu-arm-static")
    qemu_emulator = "/usr/bin/qemu-arm-static"

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    # unregister it if it has been registered and is a dynamically-linked executable
    node = "/proc/sys/fs/binfmt_misc/arm"
    if os.path.exists(node):
        qemu_unregister_string = "-1\n"
        # writing -1 asks the kernel to drop the existing binfmt entry
        with open(node, "w") as fd:
            fd.write(qemu_unregister_string)

    # register qemu emulator for interpreting other arch executable file
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
            fd.write(qemu_arm_string)

    return qemu_emulator
985
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    """Place the source RPMs for `pkgs` into <instroot>/usr/src/SRPMS.

    pkgs         -- binary package strings to resolve to SRPMs
    repometadata -- repo metadata dicts; "-source" repos provide the SRPMs
    instroot     -- install root receiving the SRPMS directory
    cachedir     -- local cache searched before downloading
    Returns the list of copied source package file names, or None when
    no source repository is configured.
    """

    def _source_repos(metadata):
        # Keep only the "-source" repos; None signals "nothing to do".
        repos = [r for r in metadata if r["name"].endswith("-source")]
        return repos or None

    def _src_name(srpm):
        # Bare source name from "name-ver-rel.src.rpm".
        match = SRPM_RE.match(srpm)
        return match.group(1) if match else None

    src_repometadata = _source_repos(repometadata)
    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    # Map source-package name -> cached .src.rpm path.
    cached = {}
    for repo in src_repometadata:
        pattern = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        for path in glob.glob(pattern):
            cached[_src_name(os.path.basename(path))] = path

    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    # Resolve each binary package to its source name, de-duplicated.
    wanted = set()
    for binpkg in pkgs:
        srcname = get_source_name(binpkg, repometadata)
        if srcname:
            wanted.add(srcname)

    src_pkgs = []
    cached_count = 0
    for srcname in list(wanted):
        local = cached.get(srcname)
        if local is not None:
            # Cache hit: copy from the local cache.
            cached_count += 1
            shutil.copy(local, destdir)
            src_pkgs.append(os.path.basename(local))
        else:
            # Cache miss: fetch from the source repo.
            fetched = get_package(srcname, src_repometadata, 'src')
            if fetched:
                shutil.copy(fetched, destdir)
                src_pkgs.append(fetched)
    msger.info("%d source packages gotten from cache" % cached_count)

    return src_pkgs
1045
def strip_end(text, suffix):
    """Return `text` with a trailing `suffix` removed, if present.

    BUGFIX: an empty `suffix` must leave `text` untouched -- previously
    text[:-len("")] == text[:0] wiped the whole string.
    """
    if not suffix or not text.endswith(suffix):
        return text
    return text[:-len(suffix)]