3 # Copyright (c) 2010, 2011 Intel Inc.
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the Free
7 # Software Foundation; version 2 of the License
9 # This program is distributed in the hope that it will be useful, but
10 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11 # or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 # You should have received a copy of the GNU General Public License along
15 # with this program; if not, write to the Free Software Foundation, Inc., 59
16 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
30 import sqlite3 as sqlite
33 import sqlite3 as sqlite
38 from xml.etree import cElementTree
41 xmlparse = cElementTree.parse
44 from fs_related import *
45 from grabber import myurlgrab
46 from proxy import get_proxy_for
51 RPM_RE = re.compile("(.*)\.(.*) (.*)-(.*)")
52 RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
53 SRPM_RE = re.compile("(.*)-(\d+.*)-(\d+\.\d+).src.rpm")
def build_name(kscfg, release=None, prefix = None, suffix = None):
    """Construct and return an image name string.

    This is a utility function to help create sensible name and fslabel
    strings. The name is constructed using the sans-prefix-and-extension
    kickstart filename and the supplied prefix and suffix.

    kscfg -- a path to a kickstart file
    release -- a replacement to suffix for image release
    prefix -- a prefix to prepend to the name; defaults to None, which causes
              no prefix to be used
    suffix -- a suffix to append to the name; defaults to None, which causes
              a YYYYMMDDHHMM suffix to be used
    """
    name = os.path.basename(kscfg)
    # strip the kickstart file extension, if any
    idx = name.rfind('.')
    if idx >= 0:
        name = name[:idx]

    if release is not None:
        suffix = release
    if suffix is None:
        suffix = time.strftime("%Y%m%d%H%M")

    # guard: prefix defaults to None, and str.startswith(None) raises
    if prefix and name.startswith(prefix):
        name = name[len(prefix):]

    prefix = "%s-" % prefix if prefix else ""
    suffix = "-%s" % suffix if suffix else ""

    ret = prefix + name + suffix
    return ret
def get_distro():
    """Detect linux distribution, support "meego"

    Returns a (dist, version, id) tuple; all empty strings when the
    platform APIs are unavailable (both helpers were removed in py3.8).
    """
    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    try:
        (dist, ver, dist_id) = platform.linux_distribution( \
                                   supported_dists = support_dists)
    except Exception:
        try:
            (dist, ver, dist_id) = platform.dist( \
                                       supported_dists = support_dists)
        except Exception:
            # neither API exists (python >= 3.8): degrade gracefully
            (dist, ver, dist_id) = ('', '', '')

    return (dist, ver, dist_id)
def get_distro_str():
    """Get composited string for current linux distribution."""
    # rid renamed from 'id' to avoid shadowing the builtin
    (dist, ver, rid) = get_distro()

    if not dist:
        return 'Unknown Linux Distro'
    else:
        distro_str = ' '.join(map(str.strip, (dist, ver, rid)))
        return distro_str.strip()
# Path of the udev rule written by hide_loopdev_presentation();
# None while no rule is installed.
_LOOP_RULE_PTH = None
def hide_loopdev_presentation():
    """Install a udev rule hiding loop devices from UDISKS automounting.

    Remembers the rule path in _LOOP_RULE_PTH so that
    unhide_loopdev_presentation() can remove it later.  Best-effort: a
    host without udev support must not abort image creation.
    """
    udev_rules = "80-prevent-loop-present.rules"
    udev_rules_dir = ['/usr/lib/udev/rules.d/',
                      '/lib/udev/rules.d/',
                      '/etc/udev/rules.d/']

    global _LOOP_RULE_PTH

    # the last existing directory wins (preserves historic behavior)
    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except OSError:
        # could not write the rule: forget the path so unhide() is a no-op
        _LOOP_RULE_PTH = None
def unhide_loopdev_presentation():
    """Remove the udev rule installed by hide_loopdev_presentation()."""
    global _LOOP_RULE_PTH

    if not _LOOP_RULE_PTH:
        return

    try:
        os.unlink(_LOOP_RULE_PTH)
        runner.quiet('udevadm trigger')
    except OSError:
        pass  # best-effort cleanup
    finally:
        # always drop the stale path so a second call is a no-op
        _LOOP_RULE_PTH = None
def extract_rpm(rpmfile, targetdir):
    """Extract an rpm payload into *targetdir* via rpm2cpio | cpio."""
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)

    msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
    try:
        p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
        p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # close our copy so cpio gets EOF/SIGPIPE if rpm2cpio dies early
        p1.stdout.close()
        (sout, serr) = p2.communicate()
        p1.wait()  # reap rpm2cpio, avoid a zombie
        msger.verbose(sout or serr)
    finally:
        # restore cwd even if a subprocess call raised
        os.chdir(olddir)
def compressing(fpath, method):
    """Compress *fpath* in place using the tool mapped from *method*.

    method -- "gz" or "bz2"; anything else (or a failing compressor)
    raises CreatorError.
    """
    comp_map = {
        "gz": "gzip",
        "bz2": "bzip2",
    }
    if method not in comp_map:
        raise CreatorError("Unsupport compress format: %s, valid values: %s"
                           % (method, ','.join(comp_map.keys())))
    cmd = find_binary_path(comp_map[method])
    rc = runner.show([cmd, "-f", fpath])
    if rc:
        raise CreatorError("Failed to %s file: %s" % (comp_map[method], fpath))
def taring(dstfile, target):
    """Create tarball *dstfile* from file-or-directory *target*.

    Compression is chosen by dstfile's extension:
    .tar (none), .gz/.tgz (gzip), .bz2/.tbz (bzip2).
    """
    import tarfile
    basen, ext = os.path.splitext(dstfile)
    comp = {".tar": None,
            ".gz": "gz",    # for .tar.gz
            ".bz2": "bz2",  # for .tar.bz2
            ".tgz": "gz",
            ".tbz": "bz2"}[ext]

    # specify tarball file path
    if not comp:
        tarpath = dstfile
    elif basen.endswith(".tar"):
        tarpath = basen
    else:
        tarpath = basen + ".tar"

    # context manager guarantees the archive is finalized even on error
    with tarfile.open(tarpath, 'w') as wf:
        if os.path.isdir(target):
            for item in os.listdir(target):
                wf.add(os.path.join(target, item), item)
        else:
            wf.add(target, os.path.basename(target))

    if comp:
        compressing(tarpath, comp)
        # when dstfile ext is ".tgz" and ".tbz", should rename
        if not basen.endswith(".tar"):
            shutil.move("%s.%s" % (tarpath, comp), dstfile)
224 def ziping(dstfile, target):
226 wf = zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED)
227 if os.path.isdir(target):
228 for item in os.listdir(target):
229 fpath = os.path.join(target, item)
230 if not os.path.isfile(fpath):
232 wf.write(fpath, item, zipfile.ZIP_DEFLATED)
234 wf.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
def packing(dstfile, target):
    """Pack *target* into *dstfile*, dispatching on the archive extension."""
    (base, ext) = os.path.splitext(dstfile)
    # ".tar.gz"/".tar.bz2" need the compound extension as the lookup key
    if ext in (".gz", ".bz2") and base.endswith(".tar"):
        ext = ".tar" + ext
    if ext not in pack_formats:
        raise CreatorError("Unsupport pack format: %s, valid values: %s"
                           % (ext, ','.join(pack_formats.keys())))
    func = pack_formats[ext]
    # func should be callable
    func(dstfile, target)
def human_size(size):
    """Return human readable string for Bytes size."""
    if size <= 0:
        # math.log() would raise ValueError for 0/negative input
        return "0M"
    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    expo = int(math.log(size, 1024))
    mant = float(size) / math.pow(1024, expo)
    return "{0:.1f}{1:s}".format(mant, measure[expo])
def check_space_pre_cp(src, dst):
    """Check whether disk space is enough before 'cp' like
    operations, else exception will be raised.
    """
    needed = get_file_size(src) * 1024 * 1024   # get_file_size() reports MB
    avail = get_filesystem_avail(dst)
    if needed > avail:
        raise CreatorError("space on %s(%s) is not enough for about %s files"
                           % (dst, human_size(avail), human_size(needed)))
def calc_hashes(file_path, hash_names, start = 0, end = None):
    """ Calculate hashes for a file. The 'file_path' argument is the file
    to calculate hash functions for, 'start' and 'end' are the starting and
    ending file offset to calculate the hash functions for. The 'hash_names'
    argument is a list of hash names to calculate. Returns the list
    of calculated hash values in the hexadecimal form in the same order
    as 'hash_names'.
    """
    if end is None:
        end = os.path.getsize(file_path)

    chunk_size = 65536
    to_read = end - start
    read = 0

    hashes = []
    for hash_name in hash_names:
        hashes.append(hashlib.new(hash_name))

    with open(file_path, "rb") as f:
        f.seek(start)
        while read < to_read:
            # shrink the final chunk so we never read past 'end'
            if read + chunk_size > to_read:
                chunk_size = to_read - read
            chunk = f.read(chunk_size)
            for hash_obj in hashes:
                hash_obj.update(chunk)
            read += chunk_size

    result = []
    for hash_obj in hashes:
        result.append(hash_obj.hexdigest())

    return result
def get_md5sum(fpath):
    """Hex MD5 digest of the file at *fpath*."""
    (digest,) = calc_hashes(fpath, ('md5', ))
    return digest
def normalize_ksfile(ksconf, release, arch):
    """Substitute @BUILD_ID@/@ARCH@ macros in a kickstart file.

    Returns the path of a temporary substituted copy when macros are
    present, otherwise the original path.  The temporary file is removed
    at interpreter exit.
    """
    def _clrtempks():
        try:
            os.unlink(ksconf)
        except OSError:
            pass

    if not os.path.exists(ksconf):
        return ksconf

    if not release:
        release = "latest"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" in ksc or "@BUILD_ID@" in ksc:
        msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
        ksc = ksc.replace("@ARCH@", arch)
        ksc = ksc.replace("@BUILD_ID@", release)
        fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
        # os.write needs bytes on python3
        os.write(fd, ksc.encode('utf-8'))
        os.close(fd)

        msger.debug('new ks path %s' % ksconf)

        atexit.register(_clrtempks)

    return ksconf
def _check_mic_chroot(rootdir):
    """Warn when *rootdir* does not look like a MeeGo/Tizen chroot."""
    base = rootdir.rstrip('/')
    release_files = [base + rel for rel in ("/etc/moblin-release",
                                            "/etc/meego-release",
                                            "/etc/tizen-release")]

    if not any(os.path.exists(p) for p in release_files):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find kernel module under %s" % rootdir)
def selinux_check(arch, fstypes):
    """Refuse to build arm or btrfs images while SELinux is enforcing."""
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        return  # no SELinux tooling installed — nothing to check

    selinux_status = runner.outs([getenforce])
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    # NOTE: filter() returns an always-truthy iterator on python3, which
    # made the old check fire unconditionally — test membership instead.
    use_btrfs = 'btrfs' in fstypes
    if use_btrfs and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")
def get_image_type(path):
    """Classify the image at *path* and return a type keyword.

    Directories are checked as chroots ("fs"); known file extensions
    are mapped directly; otherwise the content is sniffed.
    Raises CreatorError when nothing matches.
    """
    def _get_extension_name(path):
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        return None

    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    maptab = {
        "tar": "loop",
        "raw": "raw",
        "vmdk": "vmdk",
        "vdi": "vdi",
        "iso": "livecd",
        "usbimg": "liveusb",
    }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    # no extension match: sniff the file content
    with open(path, "rb") as fd:   # 'with' — the old code leaked the fd
        file_header = fd.read(1024)
    # bytes literal: comparing bytes to a str is always False on python3
    vdi_flag = b"<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)
def get_file_size(filename):
    """ Return size in MB unit """
    def _du_mb(extra_opts):
        # one 'du' invocation; raises CreatorError if the tool fails
        cmd = ['du', "-s"] + extra_opts + ["-B", "1M", filename]
        rc, duOutput = runner.runtool(cmd)
        if rc != 0:
            raise CreatorError("Failed to run: %s" % ' '.join(cmd))
        return int(duOutput.split()[0])

    # the larger of apparent size (-b) and actual disk usage
    return max(_du_mb(["-b"]), _du_mb([]))
def get_filesystem_avail(fs):
    """Bytes available to unprivileged users on the filesystem of *fs*."""
    st = os.statvfs(fs)
    return st.f_bsize * st.f_bavail
def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    """Convert *srcimg* (vmdk or vdi) into a raw image at *dstimg*."""
    #todo: check raw format file
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        # typo "soure" fixed in the user-facing message
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)
def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress file system from squshfs image"""
    cmdline = [find_binary_path("unsquashfs"), "-d", outdir, squashfsimg]
    if runner.show(cmdline) != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)
def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """ FIXME: use the dir in mic.conf instead """
    # fs_related.makedirs behaves like mkdir -p (ignores existing dirs)
    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)
def get_repostrs_from_ks(ks):
    """Extract repo definitions from a parsed kickstart as a list of dicts.

    Each dict carries the truthy attributes of one `repo` command; repos
    without a name get a deterministic temporary name derived from the
    baseurl's md5.
    """
    def _get_temp_reponame(baseurl):
        # encode: hashlib requires bytes on python3 (same digest for ascii)
        md5obj = hashlib.md5(baseurl.encode('utf-8'))
        tmpreponame = "%s" % md5obj.hexdigest()
        return "TempRepo-%s" % tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        # NOTE(review): attribute list reconstructed from pykickstart's
        # repo command — verify against the ks handler actually in use.
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'ssl_verify',
                     'proxy',
                     'proxy_username',
                     'proxy_password',
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'save',
                     'priority',
                     'nocache',
                     'cost'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)

        kickstart_repos.append(repo)

    return kickstart_repos
def _get_uncompressed_data_from_url(url, filename, proxies):
    """Download *url* to *filename* and decompress .gz/.bz2 in place.

    Returns the path of the (possibly decompressed) local file.
    """
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        # strip only the trailing extension; the old str.replace() removed
        # the FIRST occurrence anywhere in the path
        filename = filename[:-len(suffix)]
    return filename
def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    """Fetch one repodata file, reusing the cached copy when its checksum
    still matches.  Returns the local (uncompressed) file path."""
    url = os.path.join(baseurl, filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        filename = os.path.splitext(filename_tmp)[0]
    else:
        filename = filename_tmp
    if sumtype and checksum and os.path.exists(filename):
        # NOTE(review): checksum branch reconstructed — verify sumtype values
        if sumtype in ('md5', 'sha1', 'sha256'):
            with open(filename, 'rb') as f:
                file_checksum = hashlib.new(sumtype, f.read()).hexdigest()
        else:
            sumcmd = find_binary_path("%ssum" % sumtype)
            file_checksum = runner.outs([sumcmd, filename]).split()[0]

        if file_checksum and file_checksum == checksum:
            return filename  # cached copy is still valid

    return _get_uncompressed_data_from_url(url,filename_tmp,proxies)
def get_metadata_from_repos(repos, cachedir):
    """Download repomd.xml plus primary/patterns/comps metadata for each
    repo dict in *repos*, caching under *cachedir*.

    Returns a list of per-repo dicts describing the local metadata files.
    """
    my_repo_metadata = []
    for repo in repos:
        reponame = repo['name']
        baseurl = repo['baseurl']

        if 'proxy' in repo:
            proxy = repo['proxy']
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            proxies = {str(baseurl.split(":")[0]):str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = os.path.join(baseurl, "repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        # single scan of repomd <data> entries; first entry of each kind wins
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                key = 'patterns'
            elif elm.attrib["type"] in ("group_gz", "group"):
                key = 'comps'
            elif elm.attrib["type"] in ("primary_db", "primary"):
                key = 'primary'
            else:
                continue
            if key in filepaths:
                continue
            filepaths[key] = elm.find("%slocation" % ns).attrib['href']
            checksums[key] = elm.find("%sopen-checksum" % ns).text
            sumtypes[key] = elm.find("%sopen-checksum" % ns).attrib['type']

        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        # fetch the repo gpg key; a missing key is not fatal
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name":reponame,
                                 "baseurl":baseurl,
                                 "repomd":repomd,
                                 "primary":filepaths['primary'],
                                 "cachedir":cachedir,
                                 "proxies":proxies,
                                 "patterns":filepaths['patterns'],
                                 "comps":filepaths['comps'],
                                 "repokey":repokey})

    return my_repo_metadata
def get_rpmver_in_repo(repometadata):
    """Return the newest version of the 'rpm' package found in the repos'
    primary metadata, or None when not present."""
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                # component-wise numeric comparison; the old
                # reversed(sorted(...)).next() with a map() key breaks on py3
                return max(versionlist,
                           key=lambda ver: [int(i) for i in ver.split('.')])

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
                con.close()
                return row[0]
            con.close()

    return None
def get_arch(repometadata):
    """Collect the architectures present in the repos' primary metadata.

    Returns (uniq_arch, arch_list): the single most-specific arch (when
    unambiguous) and the reduced list of compatible architectures.
    """
    def uniqarch(archlist=None):
        # default changed from a shared mutable [] to None
        if archlist is None:
            archlist = []
        uniq_arch = []
        for i in range(len(archlist)):
            if archlist[i] not in rpmmisc.archPolicies.keys():
                continue
            need_append = True
            j = 0
            while j < len(uniq_arch):
                # archlist[i] is covered by an arch we already kept
                if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                    need_append = False
                    break
                # archlist[i] supersedes a kept arch
                if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                    if need_append:
                        uniq_arch[j] = archlist[i]
                        need_append = False
                    else:
                        uniq_arch.remove(uniq_arch[j])
                        continue  # do not advance j after removal
                j += 1
            if need_append:
                uniq_arch.append(archlist[i])

        return uniq_arch

    ret_uniq_arch = []
    ret_arch_list = []
    for repo in repometadata:
        archlist = []
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])
            con.close()

        uniq_arch = uniqarch(archlist)
        if not ret_uniq_arch and len(uniq_arch) == 1:
            ret_uniq_arch = uniq_arch
        ret_arch_list += uniq_arch

    ret_arch_list = uniqarch(ret_arch_list)
    return ret_uniq_arch, ret_arch_list
def get_package(pkg, repometadata, arch = None):
    """Locate binary package *pkg* in the repos and download it.

    Returns the local path of the (cached or freshly downloaded) rpm,
    or None when no repo provides it.
    """
    ver = ""
    target_repo = None
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            # parameterized queries — the old code pasted pkg/arches
            # straight into the SQL string (injection / quoting bugs)
            if arch:
                qmarks = ','.join('?' * len(arches))
                rows = con.execute('select version, release, location_href '
                                   'from packages where name = ? and arch IN (%s)' % qmarks,
                                   [pkg] + arches)
            else:
                rows = con.execute('select version, release, location_href '
                                   'from packages where name = ?', (pkg,))
            for row in rows:
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    ver = tmpver
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()

    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = os.path.join(target_repo["baseurl"], pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"],
                                              target_repo["name"],
                                              os.path.basename(pkgpath)))
        if os.path.exists(filename):
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename  # cached copy is intact

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(str(url), filename, target_repo["proxies"])
        return pkg
    else:
        return None
def get_source_name(pkg, repometadata):
    """Map a binary package string "name.arch ver-rel" to its source
    package name using the repos' primary metadata; None if unknown."""

    def get_bin_name(pkg):
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt is not None:
                                # sourcerpm lives in the rpm: namespace
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            # parameterized query instead of string-pasted SQL
            for row in con.execute("select version, release, rpm_sourcerpm "
                                   "from packages where name = ? and arch != 'src'",
                                   (pkg_name,)):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    ver = tmpver
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()

    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None
def get_pkglist_in_patterns(group, patterns):
    """Return package names required by pattern *group* in a patterns.xml
    file; empty list when the group or its requires section is missing."""
    found = False
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    # locate the pattern whose name or summary matches the group
    for elm in list(root.getroot()):
        ns = elm.tag
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        if name.text == group or summary.text == group:
            found = True
            break

    if not found:
        return pkglist

    # locate its <...requires> child
    found = False
    for requires in list(elm):
        if requires.tag.endswith("requires"):
            found = True
            break

    if not found:
        return pkglist

    for pkg in list(requires):
        pkgname = pkg.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist
def get_pkglist_in_comps(group, comps):
    """Return package names listed under *group* in a comps.xml file;
    empty list when the group is not found."""
    found = False
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    for elm in root.getiterator("group"):
        gid = elm.find("id")       # renamed from 'id': shadowed the builtin
        name = elm.find("name")
        if gid.text == group or name.text == group:
            found = True
            break

    if not found:
        return pkglist

    for require in elm.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
            if pkgname not in pkglist:
                pkglist.append(pkgname)

    return pkglist
def is_statically_linked(binary):
    """True when `file` reports *binary* as statically linked."""
    file_output = runner.outs(['file', binary])
    return ", statically linked, " in file_output
def setup_qemu_emulator(rootdir, arch):
    """Install a statically linked qemu-arm into *rootdir* and register
    it with binfmt_misc so ARM binaries run inside the chroot.

    Returns the in-chroot path of the registered emulator.
    """
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")

    # qemu emulator version check
    # loop variable renamed: the original comprehension clobbered the
    # 'arch' parameter on python2
    armv7_list = [a for a in rpmmisc.archPolicies.keys() if a.startswith('armv7')]
    if arch in armv7_list:  # need qemu (>=0.13.0)
        qemuout = runner.outs([qemu_emulator, "-h"])
        m = re.search(r"version\s*([.\d]+)", qemuout)
        if m:
            qemu_version = m.group(1)
            # numeric compare: as strings, "0.9" > "0.13" which let
            # too-old emulators pass the check
            if tuple(int(x) for x in qemu_version.split('.')) < (0, 13):
                raise CreatorError("Requires %s version >=0.13 for %s" % (qemu_emulator, arch))
        else:
            msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + "/usr/bin/qemu-arm-static")
    qemu_emulator = "/usr/bin/qemu-arm-static"

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    # unregister it if it has been registered and is a dynamically-linked executable
    node = "/proc/sys/fs/binfmt_misc/arm"
    if os.path.exists(node):
        qemu_unregister_string = "-1\n"
        with open(node, "w") as fd:   # 'with': the old code leaked the fd
            fd.write(qemu_unregister_string)

    # register qemu emulator for interpreting other arch executable file
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
            fd.write(qemu_arm_string)

    return qemu_emulator
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    """Download the source rpms for *pkgs* into instroot/usr/src/SRPMS.

    Uses cached srpms under *cachedir* when available.  Returns the list
    of srpm file names, or None when no "-source" repo is configured.
    """
    def get_source_repometadata(repometadata):
        src_repometadata = []
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot+'/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    # resolve each binary package to its source package name
    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages gotten from cache" % cached_count)

    return src_pkgs
def strip_end(text, suffix):
    """Return *text* with a trailing *suffix* removed, if present."""
    # empty-suffix guard: text[:-0] would wrongly return ""
    if not suffix or not text.endswith(suffix):
        return text
    return text[:-len(suffix)]