mic/utils/misc.py  (tools/mic.git, commit 71315d67a39c96d74641fb4f75ae0919f8d62a43)
1 #!/usr/bin/python -tt
2 #
3 # Copyright (c) 2010, 2011 Intel Inc.
4 #
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the Free
7 # Software Foundation; version 2 of the License
8 #
9 # This program is distributed in the hope that it will be useful, but
10 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11 # or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 # for more details.
13 #
14 # You should have received a copy of the GNU General Public License along
15 # with this program; if not, write to the Free Software Foundation, Inc., 59
16 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
17
18 import os
19 import sys
20 import time
21 import tempfile
22 import re
23 import shutil
24 import glob
25 import hashlib
26 import subprocess
27 import platform
28 import rpmmisc
31
32 try:
33     import sqlite3 as sqlite
34 except ImportError:
35     import sqlite
36
37 try:
38     from xml.etree import cElementTree
39 except ImportError:
40     import cElementTree
41 xmlparse = cElementTree.parse
42
43 from errors import *
44 from fs_related import *
45 from rpmmisc import myurlgrab
46 from proxy import get_proxy_for
47 import runner
48
49 from mic import msger
50
51 RPM_RE  = re.compile("(.*)\.(.*) (.*)-(.*)")
52 RPM_FMT = "%(name)s.%(arch)s %(ver_rel)s"
53 SRPM_RE = re.compile("(.*)-(\d+.*)-(\d+\.\d+).src.rpm")
54
55 def build_name(kscfg, prefix = None, suffix = None, maxlen = None):
56     """Construct and return an image name string.
57
58     This is a utility function to help create sensible name and fslabel
59     strings. The name is constructed using the sans-prefix-and-extension
60     kickstart filename and the supplied prefix and suffix.
61
62     If the name exceeds the maxlen length supplied, the prefix is first dropped
63     and then the kickstart filename portion is reduced until it fits. In other
64     words, the suffix takes precedence over the kickstart portion and the
65     kickstart portion takes precedence over the prefix.
66
67     kscfg -- a path to a kickstart file
68     prefix -- a prefix to prepend to the name; defaults to None, which causes
69               no prefix to be used
70     suffix -- a suffix to append to the name; defaults to None, which causes
71               a YYYYMMDDHHMM suffix to be used
72     maxlen -- the maximum length for the returned string; defaults to None,
73               which means there is no restriction on the name length
74
75     Note: if maxlen is less than len(suffix), the result may still exceed maxlen.
76
77     """
78     name = os.path.basename(kscfg)
79     idx = name.rfind('.')
80     if idx >= 0:
81         name = name[:idx]
82
83     if prefix is None:
84         prefix = ""
85     if suffix is None:
86         suffix = time.strftime("%Y%m%d%H%M")
87
88     if name.startswith(prefix):
89         name = name[len(prefix):]
90
91     ret = prefix + name + "-" + suffix
92     if maxlen is not None and len(ret) > maxlen:
93         ret = name[:maxlen - len(suffix) - 1] + "-" + suffix
94
95     return ret
96
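# Illustrative usage of build_name() (the kickstart path below is hypothetical):
#   >>> build_name("/tmp/tizen-handset.ks", prefix="mic-", suffix="test")
#   'mic-tizen-handset-test'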
97 def get_distro():
98     """Detect linux distribution, support "meego"
99     """
100
101     support_dists = ('SuSE',
102                      'debian',
103                      'fedora',
104                      'redhat',
105                      'centos',
106                      'meego',
107                      'moblin',
108                      'tizen')
109     try:
110         (dist, ver, id) = platform.linux_distribution( \
111                               supported_dists = support_dists)
112     except AttributeError:  # older Python without platform.linux_distribution()
113         (dist, ver, id) = platform.dist( \
114                               supported_dists = support_dists)
115
116     return (dist, ver, id)
117
118 def get_distro_str():
119     """Get composited string for current linux distribution
120     """
121     (dist, ver, id) = get_distro()
122
123     if not dist:
124         return 'Unknown Linux Distro'
125     else:
126         distro_str = ' '.join(map(str.strip, (dist, ver, id)))
127         return distro_str.strip()
128
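# Illustrative: on a Fedora host get_distro() might return ('Fedora', '16', 'Verne')
# and get_distro_str() the string 'Fedora 16 Verne'.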
129 _LOOP_RULE_PTH = None
130 def hide_loopdev_presentation():
        global _LOOP_RULE_PTH
131     udev_rules = "80-prevent-loop-present.rules"
132     udev_rules_dir = [
133                        '/usr/lib/udev/rules.d/',
134                        '/lib/udev/rules.d/',
135                        '/etc/udev/rules.d/'
136                      ]
137
138     for rdir in udev_rules_dir:
139         if os.path.exists(rdir):
140             _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)
141
142     if not _LOOP_RULE_PTH:
143         return
144
145     try:
146         with open(_LOOP_RULE_PTH, 'w') as wf:
147             wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')
148
149         runner.quiet('udevadm trigger')
150     except:
151         pass
152
153 def unhide_loopdev_presentation():
154     if not _LOOP_RULE_PTH:
155         return
156
157     try:
158         os.unlink(_LOOP_RULE_PTH)
159         runner.quiet('udevadm trigger')
160     except:
161         pass
162
163 def extract_rpm(rpmfile, targetdir):
164     rpm2cpio = find_binary_path("rpm2cpio")
165     cpio = find_binary_path("cpio")
166
167     olddir = os.getcwd()
168     os.chdir(targetdir)
169
170     msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
171     p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
172     p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
173                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
174     (sout, serr) = p2.communicate()
175     msger.verbose(sout or serr)
176
177     os.chdir(olddir)
178
179 def compressing(fpath, method):
180     comp_map = {
181         "gz": "gzip",
182         "bz2": "bzip2"
183     }
184     if method not in comp_map:
185         raise CreatorError("Unsupport compress format: %s, valid values: %s"
186                            % (method, ','.join(comp_map.keys())))
187     cmd = find_binary_path(comp_map[method])
188     rc = runner.show([cmd, "-f", fpath])
189     if rc:
190         raise CreatorError("Failed to %s file: %s" % (comp_map[method], fpath))
191
192 def taring(dstfile, target):
193     import tarfile
194     basen, ext = os.path.splitext(dstfile)
195     comp = {".tar": None,
196             ".gz": "gz", # for .tar.gz
197             ".bz2": "bz2", # for .tar.bz2
198             ".tgz": "gz",
199             ".tbz": "bz2"}[ext]
200
201     # specify tarball file path
202     if not comp:
203         tarpath = dstfile
204     elif basen.endswith(".tar"):
205         tarpath = basen
206     else:
207         tarpath = basen + ".tar"
208     wf = tarfile.open(tarpath, 'w')
209
210     if os.path.isdir(target):
211         for item in os.listdir(target):
212             wf.add(os.path.join(target, item), item)
213     else:
214         wf.add(target, os.path.basename(target))
215     wf.close()
216
217     if comp:
218         compressing(tarpath, comp)
219         # when the dstfile extension is ".tgz" or ".tbz", rename the compressed output to dstfile
220         if not basen.endswith(".tar"):
221             shutil.move("%s.%s" % (tarpath, comp), dstfile)
222
223 def ziping(dstfile, target):
224     import zipfile
225     wf = zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED)
226     if os.path.isdir(target):
227         for item in os.listdir(target):
228             fpath = os.path.join(target, item)
229             if not os.path.isfile(fpath):
230                 continue
231             wf.write(fpath, item, zipfile.ZIP_DEFLATED)
232     else:
233         wf.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
234     wf.close()
235
236 pack_formats = {
237     ".tar": taring,
238     ".tar.gz": taring,
239     ".tar.bz2": taring,
240     ".tgz": taring,
241     ".tbz": taring,
242     ".zip": ziping,
243 }
244
245 def packing(dstfile, target):
246     (base, ext) = os.path.splitext(dstfile)
247     if ext in (".gz", ".bz2") and base.endswith(".tar"):
248         ext = ".tar" + ext
249     if ext not in pack_formats:
250         raise CreatorError("Unsupport pack format: %s, valid values: %s"
251                            % (ext, ','.join(pack_formats.keys())))
252     func = pack_formats[ext]
253     # func should be callable
254     func(dstfile, target)
255
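# Illustrative usage of packing()/taring()/ziping() (paths are hypothetical):
#   packing("/tmp/rootfs.tar.gz", "/tmp/rootfs")   # tar the dir, then gzip it
#   packing("/tmp/rootfs.zip", "/tmp/rootfs")      # zip the regular files in the dir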
256 def human_size(size):
257     """Return human readable string for Bytes size
258     """
259
260     if size <= 0:
261         return "0M"
262     import math
263     measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
264     expo = int(math.log(size, 1024))
265     mant = float(size/math.pow(1024, expo))
266     return "{0:.1f}{1:s}".format(mant, measure[expo])
267
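# Examples: human_size(1536) -> '1.5K', human_size(3 * 1024 * 1024) -> '3.0M'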
268 def check_space_pre_cp(src, dst):
269     """Check whether disk space is enough before 'cp' like
270     operations, else exception will be raised.
271     """
272
273     srcsize  = get_file_size(src) * 1024 * 1024
274     freesize = get_filesystem_avail(dst)
275     if srcsize > freesize:
276         raise CreatorError("space on %s(%s) is not enough for about %s files"
277                            % (dst, human_size(freesize), human_size(srcsize)))
278
279 def calc_hashes(file_path, hash_names, start = 0, end = None):
280     """ Calculate hashes for a file. The 'file_path' argument is the file
281     to calculate hash functions for, and 'start' and 'end' are the starting
282     and ending file offsets of the region to hash. The 'hash_names' argument
283     is a list of hash names to calculate. Returns the list of calculated
284     hash values in hexadecimal form, in the same order
285     as 'hash_names'.
286     """
287     if end is None:
288         end = os.path.getsize(file_path)
289
290     chunk_size = 65536
291     to_read = end - start
292     read = 0
293
294     hashes = []
295     for hash_name in hash_names:
296         hashes.append(hashlib.new(hash_name))
297
298     with open(file_path, "rb") as f:
299         f.seek(start)
300
301         while read < to_read:
302             if read + chunk_size > to_read:
303                 chunk_size = to_read - read
304             chunk = f.read(chunk_size)
305             for hash_obj in hashes:
306                 hash_obj.update(chunk)
307             read += chunk_size
308
309     result = []
310     for hash_obj in hashes:
311         result.append(hash_obj.hexdigest())
312
313     return result
314
315 def get_md5sum(fpath):
316     return calc_hashes(fpath, ('md5', ))[0]
317
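# Illustrative usage (the path is hypothetical):
#   md5hex, sha256hex = calc_hashes("/tmp/image.raw", ("md5", "sha256"))
#   get_md5sum("/tmp/image.raw")  # shorthand for the md5-only case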
318 def normalize_ksfile(ksconf, release, arch):
319     def _clrtempks():
320         try:
321             os.unlink(ksconf)
322         except:
323             pass
324
325     if not os.path.exists(ksconf):
326         return
327
328     if not release:
329         release = "latest"
330     if not arch or re.match(r'i.86', arch):
331         arch = "ia32"
332
333     with open(ksconf) as f:
334         ksc = f.read()
335
336     if "@ARCH@" in ksc or "@BUILD_ID@" in ksc:
337         msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
338         ksc = ksc.replace("@ARCH@", arch)
339         ksc = ksc.replace("@BUILD_ID@", release)
340         fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
341         os.write(fd, ksc)
342         os.close(fd)
343
344         msger.debug('new ks path %s' % ksconf)
345
346         import atexit
347         atexit.register(_clrtempks)
348
349     return ksconf
350
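# Illustrative: a ks line such as (hypothetical URL)
#   repo --name=base --baseurl=http://download.example.com/@BUILD_ID@/repos/@ARCH@/packages
# would have @BUILD_ID@ replaced by the release string and @ARCH@ by e.g. "ia32",
# and the rewritten kickstart would be written to a temporary file under /tmp/.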
351 def _check_mic_chroot(rootdir):
352     def _path(path):
353         return rootdir.rstrip('/') + path
354
355     release_files = map(_path, [ "/etc/moblin-release",
356                                  "/etc/meego-release",
357                                  "/etc/tizen-release"])
358
359     if not any(map(os.path.exists, release_files)):
360         msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)
361
362     if not glob.glob(rootdir + "/boot/vmlinuz-*"):
363         msger.warning("Failed to find kernel module under %s" % rootdir)
364
365     return
366
367 def selinux_check(arch, fstypes):
368     try:
369         getenforce = find_binary_path('getenforce')
370     except CreatorError:
371         return
372
373     selinux_status = runner.outs([getenforce])
374     if arch and arch.startswith("arm") and selinux_status == "Enforcing":
375         raise CreatorError("Can't create arm image if selinux is enabled, "
376                            "please run 'setenforce 0' to disable selinux")
377
378     use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
379     if use_btrfs and selinux_status == "Enforcing":
380         raise CreatorError("Can't create btrfs image if selinux is enabled,"
381                            " please run 'setenforce 0' to disable selinux")
382
383 def get_image_type(path):
384     def _get_extension_name(path):
385         match = re.search("(?<=\.)\w+$", path)
386         if match:
387             return match.group(0)
388         else:
389             return None
390
391     if os.path.isdir(path):
392         _check_mic_chroot(path)
393         return "fs"
394
395     maptab = {
396               "tar": "loop",
397               "raw":"raw",
398               "vmdk":"vmdk",
399               "vdi":"vdi",
400               "iso":"livecd",
401               "usbimg":"liveusb",
402              }
403
404     extension = _get_extension_name(path)
405     if extension in maptab:
406         return maptab[extension]
407
408     fd = open(path, "rb")
409     file_header = fd.read(1024)
410     fd.close()
411     vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
412     if file_header[0:len(vdi_flag)] == vdi_flag:
413         return maptab["vdi"]
414
415     output = runner.outs(['file', path])
416     isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
417     usbimgptn = re.compile(r".*x86 boot sector.*active.*")
418     rawptn = re.compile(r".*x86 boot sector.*")
419     vmdkptn = re.compile(r".*VMware. disk image.*")
420     ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
421     ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
422     btrfsimgptn = re.compile(r".*BTRFS.*")
423     if isoptn.match(output):
424         return maptab["iso"]
425     elif usbimgptn.match(output):
426         return maptab["usbimg"]
427     elif rawptn.match(output):
428         return maptab["raw"]
429     elif vmdkptn.match(output):
430         return maptab["vmdk"]
431     elif ext3fsimgptn.match(output):
432         return "ext3fsimg"
433     elif ext4fsimgptn.match(output):
434         return "ext4fsimg"
435     elif btrfsimgptn.match(output):
436         return "btrfsimg"
437     else:
438         raise CreatorError("Cannot detect the type of image: %s" % path)
439
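# Illustrative (hypothetical paths): get_image_type("/tmp/image.vmdk") returns "vmdk"
# by extension, get_image_type("/tmp/chroot-dir") returns "fs" for a directory, and
# files without a known extension are classified from the output of 'file'.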
440 def get_file_size(file):
441     """ Return size in MB unit """
442     rc, duOutput  = runner.runtool(['du', "-s", "-b", "-B", "1M", file])
443     if rc != 0:
444         raise CreatorError("Failed to run %s" % du)
445
446     size1 = int(duOutput.split()[0])
447     rc, duOutput = runner.runtool(['du', "-s", "-B", "1M", file])
448     if rc != 0:
449         raise CreatorError("Failed to run %s" % du)
450
451     size2 = int(duOutput.split()[0])
452     if size1 > size2:
453         return size1
454     else:
455         return size2
456
457 def get_filesystem_avail(fs):
458     vfstat = os.statvfs(fs)
459     return vfstat.f_bavail * vfstat.f_bsize
460
461 def convert_image(srcimg, srcfmt, dstimg, dstfmt):
462     #convert disk format
463     if dstfmt != "raw":
464         raise CreatorError("Invalid destination image format: %s" % dstfmt)
465     msger.debug("converting %s image to %s" % (srcimg, dstimg))
466     if srcfmt == "vmdk":
467         path = find_binary_path("qemu-img")
468         argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt,  dstimg]
469     elif srcfmt == "vdi":
470         path = find_binary_path("VBoxManage")
471         argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
472     else:
473         raise CreatorError("Invalid soure image format: %s" % srcfmt)
474
475     rc = runner.show(argv)
476     if rc == 0:
477         msger.debug("convert successful")
478     else:
479         raise CreatorError("Unable to convert disk to %s" % dstfmt)
480
481 def uncompress_squashfs(squashfsimg, outdir):
482     """Uncompress file system from squshfs image"""
483     unsquashfs = find_binary_path("unsquashfs")
484     args = [ unsquashfs, "-d", outdir, squashfsimg ]
485     rc = runner.show(args)
486     if (rc != 0):
487         raise SquashfsError("Failed to uncompress %s." % squashfsimg)
488
489 def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
490     """ FIXME: use the dir in mic.conf instead """
491
492     makedirs(dir)
493     return tempfile.mkdtemp(dir = dir, prefix = prefix)
494
495 def get_repostrs_from_ks(ks):
496     def _get_temp_reponame(baseurl):
497         md5obj = hashlib.md5(baseurl)
498         tmpreponame = "%s" % md5obj.hexdigest()
499         return tmpreponame
500
501     kickstart_repos = []
502
503     for repodata in ks.handler.repo.repoList:
504         repo = {}
505         for attr in ('name',
506                      'baseurl',
507                      'mirrorlist',
508                      'includepkgs', # val is list
509                      'excludepkgs', # val is list
510                      'cost',    # int
511                      'priority',# int
512                      'save',
513                      'proxy',
514                      'proxyuser',
515                      'proxypasswd',
517                      'debuginfo',
518                      'source',
519                      'gpgkey',
520                      'ssl_verify'):
521             if hasattr(repodata, attr) and getattr(repodata, attr):
522                 repo[attr] = getattr(repodata, attr)
523
524         if 'name' not in repo:
525             repo['name'] = _get_temp_reponame(repodata.baseurl)
526
527         kickstart_repos.append(repo)
528
529     return kickstart_repos
530
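# Illustrative (hypothetical kickstart content): a line like
#   repo --name=base --baseurl=http://download.example.com/repo/
# yields an entry such as {'name': 'base', 'baseurl': 'http://download.example.com/repo/'}.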
531 def _get_uncompressed_data_from_url(url, filename, proxies):
532     filename = myurlgrab(url, filename, proxies)
533     suffix = None
534     if filename.endswith(".gz"):
535         suffix = ".gz"
536         runner.quiet(['gunzip', "-f", filename])
537     elif filename.endswith(".bz2"):
538         suffix = ".bz2"
539         runner.quiet(['bunzip2', "-f", filename])
540     if suffix:
541         filename = filename[:-len(suffix)]
542     return filename
543
544 def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
545                             sumtype=None, checksum=None):
546     url = os.path.join(baseurl, filename)
547     filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
548     if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
549         filename = os.path.splitext(filename_tmp)[0]
550     else:
551         filename = filename_tmp
552     if sumtype and checksum and os.path.exists(filename):
553         try:
554             sumcmd = find_binary_path("%ssum" % sumtype)
555         except:
556             file_checksum = None
557         else:
558             file_checksum = runner.outs([sumcmd, filename]).split()[0]
559
560         if file_checksum and file_checksum == checksum:
561             return filename
562
563     return _get_uncompressed_data_from_url(url, filename_tmp, proxies)
564
565 def get_metadata_from_repos(repos, cachedir):
566     my_repo_metadata = []
567     for repo in repos:
568         reponame = repo['name']
569         baseurl  = repo['baseurl']
570
571
572         if 'proxy' in repo:
573             proxy = repo['proxy']
574         else:
575             proxy = get_proxy_for(baseurl)
576
577         proxies = None
578         if proxy:
579             proxies = {str(baseurl.split(":")[0]): str(proxy)}
580
581         makedirs(os.path.join(cachedir, reponame))
582         url = os.path.join(baseurl, "repodata/repomd.xml")
583         filename = os.path.join(cachedir, reponame, 'repomd.xml')
584         repomd = myurlgrab(url, filename, proxies)
585         try:
586             root = xmlparse(repomd)
587         except SyntaxError:
588             raise CreatorError("repomd.xml syntax error.")
589
590         ns = root.getroot().tag
591         ns = ns[0:ns.rindex("}")+1]
592
593         filepaths = {}
594         checksums = {}
595         sumtypes = {}
596
597         for elm in root.getiterator("%sdata" % ns):
598             if elm.attrib["type"] == "patterns":
599                 filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
600                 checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
601                 sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
602                 break
603
604         for elm in root.getiterator("%sdata" % ns):
605             if elm.attrib["type"] in ("group_gz", "group"):
606                 filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
607                 checksums['comps'] = elm.find("%sopen-checksum" % ns).text
608                 sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
609                 break
610
611         primary_type = None
612         for elm in root.getiterator("%sdata" % ns):
613             if elm.attrib["type"] in ("primary_db", "primary"):
614                 primary_type = elm.attrib["type"]
615                 filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
616                 checksums['primary'] = elm.find("%sopen-checksum" % ns).text
617                 sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
618                 break
619
620         if not primary_type:
621             continue
622
623         for item in ("primary", "patterns", "comps"):
624             if item not in filepaths:
625                 filepaths[item] = None
626                 continue
627             if not filepaths[item]:
628                 continue
629             filepaths[item] = _get_metadata_from_repo(baseurl,
630                                                       proxies,
631                                                       cachedir,
632                                                       reponame,
633                                                       filepaths[item],
634                                                       sumtypes[item],
635                                                       checksums[item])
636
637         """ Get repo key """
638         try:
639             repokey = _get_metadata_from_repo(baseurl,
640                                               proxies,
641                                               cachedir,
642                                               reponame,
643                                               "repodata/repomd.xml.key")
644         except CreatorError:
645             repokey = None
646             msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))
647
648         my_repo_metadata.append({"name":reponame,
649                                  "baseurl":baseurl,
650                                  "repomd":repomd,
651                                  "primary":filepaths['primary'],
652                                  "cachedir":cachedir,
653                                  "proxies":proxies,
654                                  "patterns":filepaths['patterns'],
655                                  "comps":filepaths['comps'],
656                                  "repokey":repokey})
657
658     return my_repo_metadata
659
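# Illustrative repomd.xml fragment of the kind parsed above (namespace elided):
#   <data type="primary_db">
#     <location href="repodata/primary.sqlite.bz2"/>
#     <open-checksum type="sha256">...</open-checksum>
#   </data>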
660 def get_rpmver_in_repo(repometadata):
661     for repo in repometadata:
662         if repo["primary"].endswith(".xml"):
663             root = xmlparse(repo["primary"])
664             ns = root.getroot().tag
665             ns = ns[0:ns.rindex("}")+1]
666
667             versionlist = []
668             for elm in root.getiterator("%spackage" % ns):
669                 if elm.find("%sname" % ns).text == 'rpm':
670                     for node in elm.getchildren():
671                         if node.tag == "%sversion" % ns:
672                             versionlist.append(node.attrib['ver'])
673
674             if versionlist:
675                 return max(versionlist,
676                            key = lambda ver: map(int, ver.split('.')))
679
680         elif repo["primary"].endswith(".sqlite"):
681             con = sqlite.connect(repo["primary"])
682             for row in con.execute("select version from packages where "
683                                    "name=\"rpm\" ORDER by version DESC"):
684                 con.close()
685                 return row[0]
686
687     return None
688
689 def get_arch(repometadata):
690     def uniqarch(archlist):
691         uniq_arch = []
692         for i in range(len(archlist)):
693             if archlist[i] not in rpmmisc.archPolicies.keys():
694                 continue
695             need_append = True
696             j = 0
697             while j < len(uniq_arch):
698                 if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
699                     need_append = False
700                     break
701                 if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
702                     if need_append:
703                         uniq_arch[j] = archlist[i]
704                         need_append = False
705                     else:
706                         uniq_arch.remove(uniq_arch[j])
707                         continue
708                 j += 1
709             if need_append:
710                 uniq_arch.append(archlist[i])
711
712         return uniq_arch
713
714
715     ret_uniq_arch = []
716     ret_arch_list = []
717     for repo in repometadata:
718         archlist = []
719         if repo["primary"].endswith(".xml"):
720             root = xmlparse(repo["primary"])
721             ns = root.getroot().tag
722             ns = ns[0:ns.rindex("}")+1]
723             for elm in root.getiterator("%spackage" % ns):
724                 if elm.find("%sarch" % ns).text not in ("noarch", "src"):
725                     arch = elm.find("%sarch" % ns).text
726                     if arch not in archlist:
727                         archlist.append(arch)
728         elif repo["primary"].endswith(".sqlite"):
729             con = sqlite.connect(repo["primary"])
730             for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
731                 if row[0] not in archlist:
732                     archlist.append(row[0])
733
734             con.close()
735
736         uniq_arch = uniqarch(archlist)
737         if not ret_uniq_arch and len(uniq_arch) == 1:
738             ret_uniq_arch = uniq_arch 
739         ret_arch_list += uniq_arch
740
741     ret_arch_list = uniqarch(ret_arch_list)
742     return ret_uniq_arch, ret_arch_list
743
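# Illustrative (assuming rpmmisc.archPolicies maps 'i686' to a policy that lists
# 'i586' as compatible): a repo whose packages are i586/i686/noarch would be
# reported as i686, since compatible arches are collapsed to the most capable one.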
744 def get_package(pkg, repometadata, arch = None):
745     ver = ""
746     target_repo = None
747     if not arch:
748         arches = []
749     elif arch not in rpmmisc.archPolicies:
750         arches = [arch]
751     else:
752         arches = rpmmisc.archPolicies[arch].split(':')
753         arches.append('noarch')
754
755     for repo in repometadata:
756         if repo["primary"].endswith(".xml"):
757             root = xmlparse(repo["primary"])
758             ns = root.getroot().tag
759             ns = ns[0:ns.rindex("}")+1]
760             for elm in root.getiterator("%spackage" % ns):
761                 if elm.find("%sname" % ns).text == pkg:
762                     if elm.find("%sarch" % ns).text in arches:
763                         version = elm.find("%sversion" % ns)
764                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
765                         if tmpver > ver:
766                             ver = tmpver
767                             location = elm.find("%slocation" % ns)
768                             pkgpath = "%s" % location.attrib['href']
769                             target_repo = repo
770                         break
771         if repo["primary"].endswith(".sqlite"):
772             con = sqlite.connect(repo["primary"])
773             if arch:
774                 sql = 'select version, release, location_href from packages ' \
775                       'where name = "%s" and arch IN ("%s")' % \
776                       (pkg, '","'.join(arches))
777                 for row in con.execute(sql):
778                     tmpver = "%s-%s" % (row[0], row[1])
779                     if tmpver > ver:
780                         ver = tmpver
781                         pkgpath = "%s" % row[2]
782                         target_repo = repo
783                     break
784             else:
785                 sql = 'select version, release, location_href from packages ' \
786                       'where name = "%s"' % pkg
787                 for row in con.execute(sql):
788                     tmpver = "%s-%s" % (row[0], row[1])
789                     if tmpver > ver:
790                         ver = tmpver
791                         pkgpath = "%s" % row[2]
792                         target_repo = repo
793                     break
794             con.close()
795     if target_repo:
796         makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
797         url = os.path.join(target_repo["baseurl"], pkgpath)
798         filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
799         if os.path.exists(filename):
800             ret = rpmmisc.checkRpmIntegrity('rpm', filename)
801             if ret == 0:
802                 return filename
803
804             msger.warning("package %s is damaged: %s" %
805                           (os.path.basename(filename), filename))
806             os.unlink(filename)
807
808         pkg = myurlgrab(str(url), filename, target_repo["proxies"])
809         return pkg
810     else:
811         return None
812
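# Illustrative usage (repometadata comes from get_metadata_from_repos()):
#   path = get_package("rpm", repometadata, arch="i686")
#   'path' is the local filename of the downloaded package, or None if not found.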
813 def get_source_name(pkg, repometadata):
814
815     def get_bin_name(pkg):
816         m = RPM_RE.match(pkg)
817         if m:
818             return m.group(1)
819         return None
820
821     def get_src_name(srpm):
822         m = SRPM_RE.match(srpm)
823         if m:
824             return m.group(1)
825         return None
826
827     ver = ""
828     target_repo = None
829
830     pkg_name = get_bin_name(pkg)
831     if not pkg_name:
832         return None
833
834     for repo in repometadata:
835         if repo["primary"].endswith(".xml"):
836             root = xmlparse(repo["primary"])
837             ns = root.getroot().tag
838             ns = ns[0:ns.rindex("}")+1]
839             for elm in root.getiterator("%spackage" % ns):
840                 if elm.find("%sname" % ns).text == pkg_name:
841                     if elm.find("%sarch" % ns).text != "src":
842                         version = elm.find("%sversion" % ns)
843                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
844                         if tmpver > ver:
845                             ver = tmpver
846                             fmt = elm.find("%sformat" % ns)
847                             if fmt:
848                                 fns = fmt.getchildren()[0].tag
849                                 fns = fns[0:fns.rindex("}")+1]
850                                 pkgpath = fmt.find("%ssourcerpm" % fns).text
851                                 target_repo = repo
852                         break
853
854         if repo["primary"].endswith(".sqlite"):
855             con = sqlite.connect(repo["primary"])
856             for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
857                 tmpver = "%s-%s" % (row[0], row[1])
858                 if tmpver > ver:
                        ver = tmpver
859                     pkgpath = "%s" % row[2]
860                     target_repo = repo
861                 break
862             con.close()
863     if target_repo:
864         return get_src_name(pkgpath)
865     else:
866         return None
867
868 def get_pkglist_in_patterns(group, patterns):
869     found = False
870     pkglist = []
871     try:
872         root = xmlparse(patterns)
873     except SyntaxError:
874         raise SyntaxError("%s syntax error." % patterns)
875
876     for elm in list(root.getroot()):
877         ns = elm.tag
878         ns = ns[0:ns.rindex("}")+1]
879         name = elm.find("%sname" % ns)
880         summary = elm.find("%ssummary" % ns)
881         if name.text == group or summary.text == group:
882             found = True
883             break
884
885     if not found:
886         return pkglist
887
888     found = False
889     for requires in list(elm):
890         if requires.tag.endswith("requires"):
891             found = True
892             break
893
894     if not found:
895         return pkglist
896
897     for pkg in list(requires):
898         pkgname = pkg.attrib["name"]
899         if pkgname not in pkglist:
900             pkglist.append(pkgname)
901
902     return pkglist
903
904 def get_pkglist_in_comps(group, comps):
905     found = False
906     pkglist = []
907     try:
908         root = xmlparse(comps)
909     except SyntaxError:
910         raise SyntaxError("%s syntax error." % comps)
911
912     for elm in root.getiterator("group"):
913         id = elm.find("id")
914         name = elm.find("name")
915         if id.text == group or name.text == group:
916             packagelist = elm.find("packagelist")
917             found = True
918             break
919
920     if not found:
921         return pkglist
922
923     for require in elm.getiterator("packagereq"):
924         pkgname = require.text
925         if pkgname not in pkglist:
926             pkglist.append(pkgname)
928
929     return pkglist
930
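# Illustrative comps.xml fragment matched by get_pkglist_in_comps("base", comps):
#   <group>
#     <id>base</id><name>Base</name>
#     <packagelist><packagereq type="default">bash</packagereq></packagelist>
#   </group>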
931 def is_statically_linked(binary):
932     return ", statically linked, " in runner.outs(['file', binary])
933
934 def setup_qemu_emulator(rootdir, arch):
935     # mount binfmt_misc if it doesn't exist
936     if not os.path.exists("/proc/sys/fs/binfmt_misc"):
937         modprobecmd = find_binary_path("modprobe")
938         runner.show([modprobecmd, "binfmt_misc"])
939     if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
940         mountcmd = find_binary_path("mount")
941         runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])
942
943     # qemu_emulator is a special case, we can't use find_binary_path
944     # qemu emulator should be a statically-linked executable file
945     qemu_emulator = "/usr/bin/qemu-arm"
946     if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
947         qemu_emulator = "/usr/bin/qemu-arm-static"
948     if not os.path.exists(qemu_emulator):
949         raise CreatorError("Please install a statically-linked qemu-arm")
950
951     # qemu emulator version check
952     armv7_list = [a for a in rpmmisc.archPolicies.keys() if a.startswith('armv7')]
953     if arch in armv7_list:  # need qemu (>=0.13.0)
954         qemuout = runner.outs([qemu_emulator, "-h"])
955         m = re.search("version\s*([.\d]+)", qemuout)
956         if m:
957             qemu_version = m.group(1)
958             if map(int, qemu_version.split('.')[:2]) < [0, 13]:
959                 raise CreatorError("Requires %s version >=0.13 for %s" % (qemu_emulator, arch))
960         else:
961             msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)
962
963     if not os.path.exists(rootdir + "/usr/bin"):
964         makedirs(rootdir + "/usr/bin")
965     shutil.copy(qemu_emulator, rootdir + qemu_emulator)
966
967     # disable selinux, selinux will block qemu emulator to run
968     if os.path.exists("/usr/sbin/setenforce"):
969         msger.info('Try to disable selinux')
970         runner.show(["/usr/sbin/setenforce", "0"])
971
972     node = "/proc/sys/fs/binfmt_misc/arm"
973     if is_statically_linked(qemu_emulator) and os.path.exists(node):
974         return qemu_emulator
975
976     # unregister it if it has been registered and is a dynamically-linked executable
977     if not is_statically_linked(qemu_emulator) and os.path.exists(node):
978         qemu_unregister_string = "-1\n"
979         fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
980         fd.write(qemu_unregister_string)
981         fd.close()
982
983     # register qemu emulator for interpreting other arch executable file
984     if not os.path.exists(node):
985         qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
986         fd = open("/proc/sys/fs/binfmt_misc/register", "w")
987         fd.write(qemu_arm_string)
988         fd.close()
989
990     return qemu_emulator
991
992 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
993     def get_source_repometadata(repometadata):
994         src_repometadata=[]
995         for repo in repometadata:
996             if repo["name"].endswith("-source"):
997                 src_repometadata.append(repo)
998         if src_repometadata:
999             return src_repometadata
1000         return None
1001
1002     def get_src_name(srpm):
1003         m = SRPM_RE.match(srpm)
1004         if m:
1005             return m.group(1)
1006         return None
1007
1008     src_repometadata = get_source_repometadata(repometadata)
1009
1010     if not src_repometadata:
1011         msger.warning("No source repo found")
1012         return None
1013
1014     src_pkgs = []
1015     lpkgs_dict = {}
1016     lpkgs_path = []
1017     for repo in src_repometadata:
1018         cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
1019         lpkgs_path += glob.glob(cachepath)
1020
1021     for lpkg in lpkgs_path:
1022         lpkg_name = get_src_name(os.path.basename(lpkg))
1023         lpkgs_dict[lpkg_name] = lpkg
1024     localpkgs = lpkgs_dict.keys()
1025
1026     cached_count = 0
1027     destdir = instroot+'/usr/src/SRPMS'
1028     if not os.path.exists(destdir):
1029         os.makedirs(destdir)
1030
1031     srcpkgset = set()
1032     for _pkg in pkgs:
1033         srcpkg_name = get_source_name(_pkg, repometadata)
1034         if not srcpkg_name:
1035             continue
1036         srcpkgset.add(srcpkg_name)
1037
1038     for pkg in list(srcpkgset):
1039         if pkg in localpkgs:
1040             cached_count += 1
1041             shutil.copy(lpkgs_dict[pkg], destdir)
1042             src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
1043         else:
1044             src_pkg = get_package(pkg, src_repometadata, 'src')
1045             if src_pkg:
1046                 shutil.copy(src_pkg, destdir)
1047                 src_pkgs.append(src_pkg)
1048     msger.info("%d source packages gotten from cache" % cached_count)
1049
1050     return src_pkgs
1051
1052 def strip_end(text, suffix):
1053     if not text.endswith(suffix):
1054         return text
1055     return text[:-len(suffix)]