Merge "partitionedfs: improve debugging prints" into devel
[tools/mic.git] / mic / utils / misc.py
1 #!/usr/bin/python -tt
2 #
3 # Copyright (c) 2010, 2011 Intel Inc.
4 #
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the Free
7 # Software Foundation; version 2 of the License
8 #
9 # This program is distributed in the hope that it will be useful, but
10 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11 # or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 # for more details.
13 #
14 # You should have received a copy of the GNU General Public License along
15 # with this program; if not, write to the Free Software Foundation, Inc., 59
16 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
17
18 import os
19 import sys
20 import time
21 import tempfile
22 import re
23 import shutil
24 import glob
25 import hashlib
26 import subprocess
27 import platform
28 import rpmmisc
29 import hashlib
30 import sqlite3 as sqlite
31
32 try:
33     import sqlite3 as sqlite
34 except ImportError:
35     import sqlite
36
37 try:
38     from xml.etree import cElementTree
39 except ImportError:
40     import cElementTree
41 xmlparse = cElementTree.parse
42
43 from errors import *
44 from fs_related import *
45 from grabber import myurlgrab
46 from proxy import get_proxy_for
47 import runner
48
49 from mic import msger
50
# "name.arch version-release" -> (name, arch, version, release)
RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
# "name-version-release.src.rpm" -> (name, version, release).  The dots
# in ".src.rpm" are escaped so arbitrary characters in that position no
# longer match; raw strings avoid ambiguous backslash escapes.
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+)\.src\.rpm")
54
def build_name(kscfg, release=None, prefix=None, suffix=None):
    """Construct and return an image name string.

    The name is derived from the kickstart file's basename with its
    extension stripped, optionally wrapped with a prefix and a suffix.

    kscfg   -- path to a kickstart file
    release -- when not None, forces an empty suffix
    prefix  -- prepended as "<prefix>-"; None means no prefix; if the
               basename already starts with prefix it is stripped first
    suffix  -- appended as "-<suffix>"; None means a YYYYMMDDHHMM
               timestamp suffix
    """
    base = os.path.basename(kscfg)
    dot = base.rfind('.')
    name = base[:dot] if dot >= 0 else base

    if release is not None:
        suffix = ""
    prefix = prefix or ""
    if suffix is None:
        suffix = time.strftime("%Y%m%d%H%M")

    # Avoid doubling the prefix when the ks name already carries it.
    if name.startswith(prefix):
        name = name[len(prefix):]

    head = "%s-" % prefix if prefix else ""
    tail = "-%s" % suffix if suffix else ""
    return head + name + tail
92
def get_distro():
    """Detect the host Linux distribution, including "meego"-family IDs.

    Returns a (dist, ver, id) tuple.  Each element is an empty string
    when detection is impossible (platform.linux_distribution() and
    platform.dist() were both removed in Python 3.8, so the original
    fallback could itself raise AttributeError).
    """
    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    try:
        (dist, ver, dist_id) = platform.linux_distribution(
                                   supported_dists=support_dists)
    except Exception:
        try:
            # Legacy fallback for very old Pythons.
            (dist, ver, dist_id) = platform.dist(
                                       supported_dists=support_dists)
        except Exception:
            # Neither detection API is available.
            (dist, ver, dist_id) = ('', '', '')

    return (dist, ver, dist_id)
113
def get_distro_str():
    """Return a single display string describing the host distro."""
    dist, ver, dist_id = get_distro()

    if not dist:
        return 'Unknown Linux Distro'
    parts = [part.strip() for part in (dist, ver, dist_id)]
    return ' '.join(parts).strip()
124
# Path of the udev rule installed by hide_loopdev_presentation(),
# or None when no rule has been written.
_LOOP_RULE_PTH = None

def hide_loopdev_presentation():
    """Install a udev rule asking UDisks not to present loop devices."""
    global _LOOP_RULE_PTH

    rule_name = "80-prevent-loop-present.rules"
    # The last existing directory in this list wins.
    for rules_dir in ('/usr/lib/udev/rules.d/',
                      '/lib/udev/rules.d/',
                      '/etc/udev/rules.d/'):
        if os.path.exists(rules_dir):
            _LOOP_RULE_PTH = os.path.join(rules_dir, rule_name)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except:
        # Best effort -- hiding loop devices is purely cosmetic.
        pass
151
def unhide_loopdev_presentation():
    """Remove the udev rule installed by hide_loopdev_presentation()."""
    global _LOOP_RULE_PTH

    if _LOOP_RULE_PTH:
        try:
            os.unlink(_LOOP_RULE_PTH)
            runner.quiet('udevadm trigger')
        except:
            # Best effort -- the rule may already be gone.
            pass
163
def extract_rpm(rpmfile, targetdir):
    """Extract the payload of an rpm package into targetdir.

    Pipes `rpm2cpio rpmfile` into `cpio -idv` with targetdir as the
    working directory.  The caller's working directory is restored even
    when extraction fails (the original left the process chdir'ed into
    targetdir on any exception).
    """
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)
    try:
        msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
        p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
        p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # Close our copy of the pipe so cpio sees EOF if rpm2cpio dies.
        p1.stdout.close()
        (sout, serr) = p2.communicate()
        msger.verbose(sout or serr)
        # Reap rpm2cpio to avoid leaving a zombie process behind.
        p1.wait()
    finally:
        os.chdir(olddir)
179
def compressing(fpath, method):
    """Compress fpath in place with the given method ('gz' or 'bz2')."""
    comp_map = {
        "gz": "gzip",
        "bz2": "bzip2"
    }
    try:
        prog = comp_map[method]
    except KeyError:
        raise CreatorError("Unsupport compress format: %s, valid values: %s"
                           % (method, ','.join(comp_map.keys())))
    cmd = find_binary_path(prog)
    if runner.show([cmd, "-f", fpath]):
        raise CreatorError("Failed to %s file: %s" % (prog, fpath))
192
def taring(dstfile, target):
    """Create a tarball of target at dstfile, compressing it when the
    destination extension asks for it."""
    import tarfile
    basen, ext = os.path.splitext(dstfile)
    # Destination extension -> compression method (None = plain tar).
    comp = {".tar": None,
            ".gz": "gz", # for .tar.gz
            ".bz2": "bz2", # for .tar.bz2
            ".tgz": "gz",
            ".tbz": "bz2"}[ext]

    # Work out where the intermediate uncompressed tarball lives.
    if not comp:
        tarpath = dstfile
    elif basen.endswith(".tar"):
        tarpath = basen
    else:
        tarpath = basen + ".tar"

    wf = tarfile.open(tarpath, 'w')
    if os.path.isdir(target):
        for entry in os.listdir(target):
            wf.add(os.path.join(target, entry), entry)
    else:
        wf.add(target, os.path.basename(target))
    wf.close()

    if comp:
        compressing(tarpath, comp)
        # gzip/bzip2 appended its own extension; rename for .tgz/.tbz.
        if not basen.endswith(".tar"):
            shutil.move("%s.%s" % (tarpath, comp), dstfile)
223
def ziping(dstfile, target):
    """Create a zip archive of target at dstfile.

    When target is a directory, only its top-level regular files are
    archived (subdirectories are skipped); otherwise target itself is
    added under its basename.
    """
    import zipfile
    # Context manager ensures the archive is closed (and its central
    # directory flushed) even if adding an entry raises -- the original
    # leaked the handle on error.
    with zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED) as wf:
        if os.path.isdir(target):
            for item in os.listdir(target):
                fpath = os.path.join(target, item)
                if not os.path.isfile(fpath):
                    continue
                wf.write(fpath, item, zipfile.ZIP_DEFLATED)
        else:
            wf.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
236
# Dispatch table mapping a destination-file extension to the function
# that creates that archive type (used by packing()).
pack_formats = {
    ".tar": taring,
    ".tar.gz": taring,
    ".tar.bz2": taring,
    ".tgz": taring,
    ".tbz": taring,
    ".zip": ziping,
}
245
def packing(dstfile, target):
    """Pack target into dstfile, picking the format from its extension."""
    base, ext = os.path.splitext(dstfile)
    # Re-attach ".tar" so ".tar.gz"/".tar.bz2" are treated as one unit.
    if ext in (".gz", ".bz2") and base.endswith(".tar"):
        ext = ".tar" + ext
    try:
        pack_func = pack_formats[ext]
    except KeyError:
        raise CreatorError("Unsupport pack format: %s, valid values: %s"
                           % (ext, ','.join(pack_formats.keys())))
    pack_func(dstfile, target)
256
def human_size(size):
    """Return a human readable string (e.g. "1.5K") for a byte count."""
    if size <= 0:
        return "0M"
    import math
    units = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    power = int(math.log(size, 1024))
    scaled = size / math.pow(1024, power)
    return "{0:.1f}{1:s}".format(scaled, units[power])
268
def check_space_pre_cp(src, dst):
    """Raise CreatorError when dst lacks room for a 'cp'-like copy of src."""
    # get_file_size() reports MB; convert to bytes for the comparison.
    needed = get_file_size(src) * 1024 * 1024
    avail = get_filesystem_avail(dst)
    if needed > avail:
        raise CreatorError("space on %s(%s) is not enough for about %s files"
                           % (dst, human_size(avail), human_size(needed)))
279
def calc_hashes(file_path, hash_names, start=0, end=None):
    """Calculate hash digests over a byte range of a file.

    file_path  -- file to hash
    hash_names -- iterable of hashlib algorithm names (e.g. ('md5',))
    start, end -- byte offsets delimiting the region to hash; end
                  defaults to the file size
    Returns the hex digests as a list, in hash_names order.
    """
    if end is None:
        end = os.path.getsize(file_path)

    digests = [hashlib.new(name) for name in hash_names]

    to_read = end - start
    done = 0
    block = 65536
    with open(file_path, "rb") as f:
        f.seek(start)
        while done < to_read:
            # Shrink the final read so we never run past 'end'.
            if to_read - done < block:
                block = to_read - done
            data = f.read(block)
            for digest in digests:
                digest.update(data)
            done += block

    return [digest.hexdigest() for digest in digests]
315
def get_md5sum(fpath):
    """Return the md5 hex digest of the whole file at fpath."""
    digests = calc_hashes(fpath, ('md5', ))
    return digests[0]
318
def normalize_ksfile(ksconf, release, arch):
    """Substitute @BUILD_ID@/@ARCH@ macros in a kickstart file.

    When the file contains either macro, a substituted copy is written
    to a temp file under /tmp and that new path is returned (the copy is
    removed at interpreter exit); otherwise the original path is
    returned.  Returns None when ksconf does not exist.
    """
    def _clrtempks():
        # 'ksconf' is read at call time, after it has been rebound to
        # the temp copy below -- so this removes the substituted copy,
        # not the caller's original file.
        try:
            os.unlink(ksconf)
        except:
            pass

    if not os.path.exists(ksconf):
        return

    # Fallback values for the macro substitution.
    if not release:
        release = "latest"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" in ksc or "@BUILD_ID@" in ksc:
        msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
        ksc = ksc.replace("@ARCH@", arch)
        ksc = ksc.replace("@BUILD_ID@", release)
        # 'ksconf' is rebound to the temp copy from here on.
        fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
        os.write(fd, ksc)
        os.close(fd)

        msger.debug('new ks path %s' % ksconf)

        import atexit
        atexit.register(_clrtempks)

    return ksconf
351
def _check_mic_chroot(rootdir):
    """Warn when rootdir does not look like a MeeGo/Tizen chroot."""
    root = rootdir.rstrip('/')
    release_files = [root + relpath
                     for relpath in ("/etc/moblin-release",
                                     "/etc/meego-release",
                                     "/etc/tizen-release")]

    if not any(os.path.exists(path) for path in release_files):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find kernel module under %s" % rootdir)

    return
367
def selinux_check(arch, fstypes):
    """Refuse to build arm or btrfs images while SELinux is enforcing.

    arch    -- target architecture string (may be None)
    fstypes -- iterable of filesystem type names used by the image
    Raises CreatorError when SELinux reports "Enforcing" and the image
    needs a configuration known to break under it; silently returns
    when the getenforce tool is not installed.
    """
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        # No getenforce binary: assume SELinux is not present.
        return

    selinux_status = runner.outs([getenforce])
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    # Membership test instead of filter(): same result on Python 2, and
    # still correct on Python 3 where filter() returns an iterator
    # object that is always truthy.
    if 'btrfs' in fstypes and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")
383
def get_image_type(path):
    """Detect the image type of path.

    Directories are treated as "fs" chroots; otherwise the type is
    guessed first from the file extension, then from the content (a
    VirtualBox VDI header probe, then the `file` utility output).
    Raises CreatorError when the type cannot be determined.
    """
    def _get_extension_name(path):
        # Extension without the leading dot, or None when there is none.
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    maptab = {
              "tar": "loop",
              "raw":"raw",
              "vmdk":"vmdk",
              "vdi":"vdi",
              "iso":"livecd",
              "usbimg":"liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    # Sniff the VirtualBox VDI signature directly from the header.
    # (with-statement so the handle is closed even if read() fails.)
    with open(path, "rb") as fd:
        file_header = fd.read(1024)
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    # Fall back to classifying the `file` utility's description.
    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)
440
441
def get_file_size(filename):
    """Return the size of filename in MB.

    Takes the larger of du's apparent size and actual disk usage.
    """
    def _du_mb(cmd):
        # Run one du invocation and parse its MB figure.
        rc, output = runner.runtool(cmd)
        if rc != 0:
            raise CreatorError("Failed to run: %s" % ' '.join(cmd))
        return int(output.split()[0])

    apparent = _du_mb(['du', "-s", "-b", "-B", "1M", filename])
    on_disk = _du_mb(['du', "-s", "-B", "1M", filename])
    return max(apparent, on_disk)
457
458
def get_filesystem_avail(fs):
    """Return the free space in bytes on the filesystem containing fs."""
    stats = os.statvfs(fs)
    return stats.f_bsize * stats.f_bavail
462
def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    """Convert a vmdk/vdi disk image to a raw image file."""
    # Only raw output is supported.
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        tool = find_binary_path("qemu-img")
        argv = [tool, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        tool = find_binary_path("VBoxManage")
        argv = [tool, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid soure image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)
482
def uncompress_squashfs(squashfsimg, outdir):
    """Unpack a squashfs image into outdir using unsquashfs."""
    unsquashfs = find_binary_path("unsquashfs")
    if runner.show([unsquashfs, "-d", outdir, squashfsimg]) != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)
490
def mkdtemp(dir="/var/tmp", prefix="mic-tmp-"):
    """Create and return a unique temp directory under 'dir'.

    FIXME: use the dir in mic.conf instead.
    """
    makedirs(dir)
    return tempfile.mkdtemp(prefix=prefix, dir=dir)
496
def get_repostrs_from_ks(ks):
    """Extract repo settings from a parsed kickstart object.

    ks -- a pykickstart handler whose repo section is inspected
    Returns a list of dicts, one per repo command, containing only the
    attributes that are present and non-empty.  A repo without a name
    gets a deterministic one derived from the md5 of its baseurl.
    """
    def _get_temp_reponame(baseurl):
        # hashlib.md5 needs bytes on Python 3; on Python 2 a str url
        # is already bytes and is hashed unchanged, as before.
        if not isinstance(baseurl, bytes):
            baseurl = baseurl.encode('utf-8')
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        # NOTE: 'proxypasswd' was listed twice in the original tuple;
        # the duplicate has been removed (same resulting dict).
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',    # int
                     'priority',# int
                     'save',
                     'proxy',
                     'proxyuser',
                     'proxypasswd',
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'ssl_verify'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)

        kickstart_repos.append(repo)

    return kickstart_repos
532
def _get_uncompressed_data_from_url(url, filename, proxies):
    """Download url to filename and gunzip/bunzip2 it when compressed.

    Returns the path of the (uncompressed) downloaded file.
    """
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        # Strip the suffix from the end only; str.replace() would also
        # clobber a ".gz"/".bz2" occurring earlier in the path.
        filename = filename[:-len(suffix)]
    return filename
545
def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    """Fetch one repodata file into the cache, reusing the cached copy
    when its checksum already matches."""
    url = os.path.join(baseurl, filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame,
                                     os.path.basename(filename)))
    # The cached file is stored uncompressed; drop a .gz/.bz2 suffix.
    base, ext = os.path.splitext(filename_tmp)
    filename = base if ext in (".gz", ".bz2") else filename_tmp

    if sumtype and checksum and os.path.exists(filename):
        try:
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, filename]).split()[0]

        # Cached copy is still valid -- skip the download.
        if file_checksum and file_checksum == checksum:
            return filename

    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)
566
def get_metadata_from_repos(repos, cachedir):
    """Download repodata (repomd, primary, patterns, comps, repo key)
    for every repo into cachedir.

    repos    -- list of repo dicts (see get_repostrs_from_ks())
    cachedir -- local cache directory root
    Returns a list of per-repo metadata dicts; repos without a usable
    'primary'/'primary_db' entry in repomd.xml are skipped.
    Raises CreatorError when repomd.xml cannot be parsed.
    """
    my_repo_metadata = []
    for repo in repos:
        reponame = repo['name']
        baseurl  = repo['baseurl']

        # Per-repo proxy wins over the globally configured one.
        if 'proxy' in repo:
            proxy = repo['proxy']
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
           proxies = {str(baseurl.split(":")[0]):str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = os.path.join(baseurl, "repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        # Extract the XML namespace prefix, e.g. "{http://...}".
        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        # Locate the "patterns" metadata entry, if present.
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # Locate the package-group ("comps") metadata entry, if present.
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # Locate the primary package list (sqlite db or xml).
        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        # A repo without primary metadata is unusable -- skip it.
        if not primary_type:
            continue

        # Download each located metadata file (checksum-cached).
        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        """ Get repo key """
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            # A missing signing key is not fatal.
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name":reponame,
                                 "baseurl":baseurl,
                                 "repomd":repomd,
                                 "primary":filepaths['primary'],
                                 "cachedir":cachedir,
                                 "proxies":proxies,
                                 "patterns":filepaths['patterns'],
                                 "comps":filepaths['comps'],
                                 "repokey":repokey})

    return my_repo_metadata
661
def get_rpmver_in_repo(repometadata):
    """Return the newest 'rpm' package version found in the repos'
    primary metadata (xml or sqlite), or None when not found."""
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                # max() with a numeric key replaces the original
                # reversed(sorted(...)).next() chain, which only worked
                # on Python 2 (.next() and orderable map objects).
                return max(versionlist,
                           key=lambda ver: [int(p) for p in ver.split('.')])

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            try:
                for row in con.execute("select version from packages where "
                                       "name=\"rpm\" ORDER by version DESC"):
                    return row[0]
            finally:
                # Close even when no row matched -- the original leaked
                # the connection in that case.
                con.close()

    return None
690
def get_arch(repometadata):
    """Scan the repos' primary metadata and derive target architectures.

    repometadata -- list of repo metadata dicts (see
                    get_metadata_from_repos())
    Returns (ret_uniq_arch, ret_arch_list): ret_uniq_arch is a
    single-element list when exactly one architecture family was found
    in some repo (else []), ret_arch_list is the reduced list of
    architectures across all repos.
    """
    # NOTE(review): the mutable default argument is harmless here
    # because uniqarch() never mutates archlist.
    def uniqarch(archlist=[]):
        # Reduce archlist to the most specific arch of each
        # compatibility family, using rpmmisc.archPolicies ("x:y:z"
        # strings listing the archs a given arch can install).
        uniq_arch = []
        for i in range(len(archlist)):
            # Skip archs that have no policy entry at all.
            if archlist[i] not in rpmmisc.archPolicies.keys():
                continue
            need_append = True
            j = 0
            while j < len(uniq_arch):
                # archlist[i] is covered by an arch we already kept.
                if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                    need_append = False
                    break
                # archlist[i] supersedes a kept arch: replace it once,
                # drop any further superseded entries.
                if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                    if need_append:
                        uniq_arch[j] = archlist[i]
                        need_append = False
                    else:
                        uniq_arch.remove(uniq_arch[j])
                        continue
                j += 1
            if need_append:
                 uniq_arch.append(archlist[i])

        return uniq_arch


    ret_uniq_arch = []
    ret_arch_list = []
    for repo in repometadata:
        archlist = []
        # Collect the distinct non-noarch/non-src archs in this repo.
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

            con.close()

        uniq_arch = uniqarch(archlist)
        # First repo that resolves to exactly one arch decides
        # ret_uniq_arch.
        if not ret_uniq_arch and len(uniq_arch) == 1:
            ret_uniq_arch = uniq_arch
        ret_arch_list += uniq_arch

    # Reduce across all repos as well.
    ret_arch_list = uniqarch(ret_arch_list)
    return ret_uniq_arch, ret_arch_list
745
def get_package(pkg, repometadata, arch = None):
    """Download the newest package named 'pkg' found in the repos.

    pkg          -- package name
    repometadata -- list of repo metadata dicts (see
                    get_metadata_from_repos())
    arch         -- target arch; expanded through rpmmisc.archPolicies
                    (plus 'noarch') when known, taken literally when
                    unknown, and left as an empty match list when None
    Returns the local path of the downloaded (and integrity-checked)
    rpm, or None when the package is not found in any repo.
    """
    ver = ""
    target_repo = None
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    # Pick the highest version across all repos.
    # NOTE(review): versions are compared lexically as
    # "ver-rel" strings, not with rpm version semantics --
    # e.g. "9" > "10"; preserved as-is.
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if arch:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                # Only the first matching row is considered (break).
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = os.path.join(target_repo["baseurl"], pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        if os.path.exists(filename):
            # Reuse the cached rpm only if it passes integrity check.
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(str(url), filename, target_repo["proxies"])
        return pkg
    else:
        return None
814
def get_source_name(pkg, repometadata):
    """Find the source-package (srpm) name that produced binary rpm 'pkg'.

    pkg          -- binary package string "name.arch version-release"
    repometadata -- list of repo metadata dicts (see
                    get_metadata_from_repos())
    Returns the source package name, or None when it cannot be found.
    """

    def get_bin_name(pkg):
        # "name.arch ver-rel" -> "name"
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        # "name-ver-rel.src.rpm" -> "name"
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    # NOTE: versions are compared lexically as "ver-rel" strings,
    # matching the behaviour elsewhere in this module.
    ver = ""
    target_repo = None
    pkgpath = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt:
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    # BUGFIX: the original never updated 'ver' here, so a
                    # later repo with an *older* version could override
                    # the selection made by an earlier repo.
                    ver = tmpver
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None
869
def get_pkglist_in_patterns(group, patterns):
    """Return the package names required by pattern *group*.

    group    -- pattern name or summary text to match
    patterns -- path to a (namespaced) patterns XML file

    Collects the "name" attributes of the entries under the matched
    pattern's *requires element.  Returns an empty list when the group is
    not found or it has no requires section.  Raises SyntaxError when the
    file cannot be parsed.
    """
    pkglist = []
    try:
        tree = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    # locate the pattern whose <name> or <summary> matches the group
    matched = None
    for node in list(tree.getroot()):
        tag = node.tag
        nsprefix = tag[0:tag.rindex("}")+1]
        if node.find("%sname" % nsprefix).text == group or \
           node.find("%ssummary" % nsprefix).text == group:
            matched = node
            break

    if matched is None:
        return pkglist

    # locate the first child element whose tag ends with "requires"
    req_elem = None
    for child in list(matched):
        if child.tag.endswith("requires"):
            req_elem = child
            break

    if req_elem is None:
        return pkglist

    # collect unique package names, preserving document order
    for entry in list(req_elem):
        pkgname = entry.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist
905
def get_pkglist_in_comps(group, comps):
    """Return the package names listed for *group* in a comps XML file.

    group -- group <id> or <name> text to look up
    comps -- path to the comps XML file

    Returns a list of unique package names in document order; empty list
    when the group is not present.  Raises SyntaxError on malformed XML.
    """
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    # find the <group> whose <id> or <name> matches;
    # iter() replaces getiterator(), which was removed in py3.9
    grp = None
    for elm in root.iter("group"):
        if elm.find("id").text == group or elm.find("name").text == group:
            grp = elm
            break

    if grp is None:
        return pkglist

    # every <packagereq> under the matched group names one required
    # package; iter() already filters by tag, so no re-check (the old
    # redundant check could reuse a stale pkgname); skip duplicates
    for require in grp.iter("packagereq"):
        pkgname = require.text
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist
932
def is_statically_linked(binary):
    """Return True when `file` reports *binary* as statically linked."""
    file_report = runner.outs(['file', binary])
    return ", statically linked, " in file_report
935
def setup_qemu_emulator(rootdir, arch):
    """Install a statically linked qemu-arm into *rootdir* and register it
    with binfmt_misc so ARM binaries can be run inside the chroot.

    rootdir -- path to the target root filesystem
    arch    -- target architecture string (e.g. "armv7l")

    Returns the in-chroot path of the emulator ("/usr/bin/qemu-arm-static").
    Raises CreatorError when no statically-linked qemu-arm is available or
    it is too old for an armv7 target.
    """
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none",
                     "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or \
       not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")

    # qemu emulator version check: armv7 targets need qemu >= 0.13.0.
    # The comprehension variable must NOT be named "arch": in Python 2 it
    # leaks out of the comprehension and clobbers the "arch" parameter.
    armv7_list = [p for p in rpmmisc.archPolicies.keys()
                  if p.startswith('armv7')]
    if arch in armv7_list:
        qemuout = runner.outs([qemu_emulator, "-h"])
        m = re.search("version\s*([.\d]+)", qemuout)
        if m:
            qemu_version = m.group(1)
            # compare numerically: plain string comparison would treat
            # e.g. "0.9" as newer than "0.13"
            ver_tuple = tuple(int(x) for x in qemu_version.split('.') if x)
            if ver_tuple < (0, 13):
                raise CreatorError("Requires %s version >=0.13 for %s" %
                                   (qemu_emulator, arch))
        else:
            msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + "/usr/bin/qemu-arm-static")
    qemu_emulator = "/usr/bin/qemu-arm-static"

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    # unregister it if it has been registered and is a dynamically-linked executable
    node = "/proc/sys/fs/binfmt_misc/arm"
    if os.path.exists(node):
        qemu_unregister_string = "-1\n"
        # writing -1 removes the existing binfmt entry
        with open(node, "w") as fd:
            fd.write(qemu_unregister_string)

    # register qemu emulator for interpreting other arch executable file
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
            fd.write(qemu_arm_string)

    return qemu_emulator
991
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    """Fetch the source rpms for *pkgs* into <instroot>/usr/src/SRPMS.

    pkgs         -- binary package spec strings to resolve to srpms
    repometadata -- repo dicts; repos whose "name" ends in "-source"
                    are treated as source repos
    instroot     -- target root; srpms land in its /usr/src/SRPMS
    cachedir     -- local package cache root

    Cached srpms under *cachedir* are copied directly; the rest are
    downloaded via get_package().  Returns the list of srpm names/paths
    handled, or None when no source repo is configured.
    """

    def get_source_repometadata(repometadata):
        # keep only the repos that are marked as source repos
        src_repos = [r for r in repometadata
                     if r["name"].endswith("-source")]
        return src_repos or None

    def get_src_name(srpm):
        # "name-ver-rel.src.rpm" -> "name"
        m = SRPM_RE.match(srpm)
        return m.group(1) if m else None

    src_repometadata = get_source_repometadata(repometadata)
    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    # index the locally cached srpms by source package name
    cached_paths = []
    for repo in src_repometadata:
        pattern = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        cached_paths += glob.glob(pattern)

    lpkgs_dict = {}
    for path in cached_paths:
        lpkgs_dict[get_src_name(os.path.basename(path))] = path
    localpkgs = lpkgs_dict.keys()

    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    # resolve the set of source package names needed by pkgs
    srcpkgset = set()
    for binpkg in pkgs:
        srcname = get_source_name(binpkg, repometadata)
        if srcname:
            srcpkgset.add(srcname)

    src_pkgs = []
    cached_count = 0
    for srcname in list(srcpkgset):
        if srcname in localpkgs:
            # cache hit: copy the local srpm straight into the target
            cached_count += 1
            shutil.copy(lpkgs_dict[srcname], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[srcname]))
        else:
            fetched = get_package(srcname, src_repometadata, 'src')
            if fetched:
                shutil.copy(fetched, destdir)
                src_pkgs.append(fetched)
    msger.info("%d source packages gotten from cache" % cached_count)

    return src_pkgs
1051
def strip_end(text, suffix):
    """Return *text* with *suffix* removed from its end, if present.

    Guards against the empty-suffix case: text[:-len("")] is text[:0],
    which would wrongly return "" instead of the unchanged text.
    """
    if not suffix or not text.endswith(suffix):
        return text
    return text[:-len(suffix)]