3 # Copyright (c) 2010, 2011 Intel Inc.
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the Free
7 # Software Foundation; version 2 of the License
9 # This program is distributed in the hope that it will be useful, but
10 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11 # or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 # You should have received a copy of the GNU General Public License along
15 # with this program; if not, write to the Free Software Foundation, Inc., 59
16 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
18 from __future__ import with_statement
33 import sqlite3 as sqlite
38 from xml.etree import cElementTree
41 xmlparse = cElementTree.parse
44 from mic.archive import get_archive_suffixes
45 from mic.utils.errors import CreatorError, SquashfsError
46 from mic.utils.fs_related import find_binary_path, makedirs
47 from mic.utils.grabber import myurlgrab
48 from mic.utils.proxy import get_proxy_for
49 from mic.utils import runner
50 from mic.utils import rpmmisc
51 from mic.utils.safeurl import SafeURL
# Pattern splitting a "name.arch version-release" string (see RPM_FMT).
RPM_RE  = re.compile("(.*)\.(.*) (.*)-(.*)")
# Canonical formatting template for a binary rpm descriptor string.
RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
# Pattern splitting a source rpm filename into (name, version, release).
SRPM_RE = re.compile("(.*)-(\d+.*)-(\d+\.\d+).src.rpm")
def build_name(kscfg, release=None, prefix = None, suffix = None):
    """Construct and return an image name string.

    This is a utility function to help create sensible name and fslabel
    strings. The name is constructed using the sans-prefix-and-extension
    kickstart filename and the supplied prefix and suffix.

    kscfg -- a path to a kickstart file
    release -- a replacement to suffix for image release
    prefix -- a prefix to prepend to the name; defaults to None, which causes
    suffix -- a suffix to append to the name; defaults to None, which causes
              a YYYYMMDDHHMM suffix to be used

    Note, if maxlen is less then the len(suffix), you get to keep both pieces.
    """
    name = os.path.basename(kscfg)
    # NOTE(review): the extension-stripping and release/suffix-default
    # statements of the original file are not visible in this chunk --
    # confirm against the full source before relying on this body.
    if release is not None:
        suffix = time.strftime("%Y%m%d%H%M")
    if name.startswith(prefix):
        name = name[len(prefix):]

    prefix = "%s-" % prefix if prefix else ""
    suffix = "-%s" % suffix if suffix else ""

    ret = prefix + name + suffix
# --- interior of get_distro(); its 'def' line is not visible in this chunk ---
"""Detect linux distribution, support "meego"
"""
support_dists = ('SuSE',
# NOTE(review): the remaining distro names and the try/except that selects
# between the two platform calls are not visible in this chunk.
(dist, ver, id) = platform.linux_distribution( \
                              supported_dists = support_dists)
(dist, ver, id) = platform.dist( \
                              supported_dists = support_dists)
return (dist, ver, id)

# --- interior of get_hostname(): report the local host's network name ---
return platform.node()
def get_hostname_distro_str():
    """Get composited string for current linux distribution
    """
    (dist, ver, id) = get_distro()
    hostname = get_hostname()
    # Generic label when the distro could not be identified; the guarding
    # 'if not dist:' line is not visible in this chunk -- confirm there.
    return "%s(Unknown Linux Distribution)" % hostname
    # Otherwise compose "<hostname> <dist> <ver> <id>" with fields trimmed.
    distro_str = ' '.join(map(str.strip, (hostname, dist, ver, id)))
    return distro_str.strip()
# Path of the temporary udev rule written by hide_loopdev_presentation();
# stays None until the rule file has been installed.
_LOOP_RULE_PTH = None

def hide_loopdev_presentation():
    """Install a udev rule hiding loop devices from udisks presentation."""
    udev_rules = "80-prevent-loop-present.rules"
    # Candidate udev rules directories; the enclosing sequence literal is
    # not fully visible in this chunk.
    '/usr/lib/udev/rules.d/',
    '/lib/udev/rules.d/',

    global _LOOP_RULE_PTH

    # Use the first rules directory that exists on this host.
    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)
    # Nothing to do when no rules directory was found (the early-return body
    # is not visible in this chunk).
    if not _LOOP_RULE_PTH:
    with open(_LOOP_RULE_PTH, 'w') as wf:
        wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')
    # Ask udev to re-evaluate devices so the new rule takes effect.
    runner.quiet('udevadm trigger')
def unhide_loopdev_presentation():
    """Remove the udev rule installed by hide_loopdev_presentation()."""
    #global _LOOP_RULE_PTH
    # Nothing to remove when no rule was installed (the early-return body
    # and the try/except around unlink are not visible in this chunk).
    if not _LOOP_RULE_PTH:
    os.unlink(_LOOP_RULE_PTH)
    runner.quiet('udevadm trigger')
def extract_rpm(rpmfile, targetdir):
    """Extract the payload of 'rpmfile' by piping rpm2cpio into cpio.

    NOTE(review): cpio extracts into the current working directory; the
    chdir into 'targetdir' (and back) is not visible in this chunk.
    """
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
    # Stream the cpio payload of the rpm straight into cpio -idv.
    p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
    p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (sout, serr) = p2.communicate()
    msger.verbose(sout or serr)
def human_size(size):
    """Return human readable string for Bytes size
    """
    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    # Largest power of 1024 not exceeding 'size' selects the unit letter.
    # math.log would raise for size <= 0; presumably a guard exists in the
    # lines not visible in this chunk -- TODO confirm.
    expo = int(math.log(size, 1024))
    mant = float(size/math.pow(1024, expo))
    return "{0:.1f}{1:s}".format(mant, measure[expo])
def get_block_size(file_obj):
    """ Return the block size of the file-system backing 'file_obj'.

    Errors are indicated by the 'IOError' exception. """
    from fcntl import ioctl

    # FIGETBSZ (ioctl number 2) asks the host file-system for the block
    # size of the file underlying 'file_obj'.
    request = struct.pack('I', 0)
    answer = ioctl(file_obj, 2, request)
    (block_size,) = struct.unpack('I', answer)
    return block_size
def check_space_pre_cp(src, dst):
    """Verify 'dst' has room for a copy of 'src' before a 'cp'-like
    operation; raise CreatorError when free space is insufficient.
    """
    needed = get_file_size(src) * 1024 * 1024
    available = get_filesystem_avail(dst)
    if needed > available:
        raise CreatorError("space on %s(%s) is not enough for about %s files"
                           % (dst, human_size(available), human_size(needed)))
def calc_hashes(file_path, hash_names, start = 0, end = None):
    """ Calculate hashes for a file. The 'file_path' argument is the file
    to calculate hash functions for, 'start' and 'end' are the starting and
    ending file offset to calculate the has functions for. The 'hash_names'
    argument is a list of hash names to calculate. Returns the the list
    of calculated hash values in the hexadecimal form in the same order
    """
    # Default 'end' to the file size; the guarding 'if end is None:' and the
    # initialisation of hashes/read/chunk_size/result are not visible in
    # this chunk -- confirm against the full source.
    end = os.path.getsize(file_path)

    to_read = end - start

    # One hashlib object per requested algorithm, updated in lock-step.
    for hash_name in hash_names:
        hashes.append(hashlib.new(hash_name))

    with open(file_path, "rb") as f:
        # Read the [start, end) window in chunks, shrinking the final read.
        while read < to_read:
            if read + chunk_size > to_read:
                chunk_size = to_read - read
            chunk = f.read(chunk_size)
            for hash_obj in hashes:
                hash_obj.update(chunk)

    # Collect hex digests in the same order as 'hash_names'.
    for hash_obj in hashes:
        result.append(hash_obj.hexdigest())
def get_md5sum(fpath):
    """Return the hex-encoded MD5 digest of the file at 'fpath'."""
    (digest,) = calc_hashes(fpath, ('md5', ))
    return digest
def get_sha1sum(fpath):
    """Return the hex-encoded SHA-1 digest of the file at 'fpath'."""
    (digest,) = calc_hashes(fpath, ('sha1', ))
    return digest
def get_sha256sum(fpath):
    """Return the hex-encoded SHA-256 digest of the file at 'fpath'."""
    (digest,) = calc_hashes(fpath, ('sha256', ))
    return digest
def normalize_ksfile(ksconf, release, arch):
    """
    Return the name of a normalized ks file in which macro variables
    @BUILD_ID@ and @ARCH@ are replace with real values.

    The original ks file is returned if no special macro is used, otherwise
    a temp file is created and returned, which will be deleted when program
    """
    # Substitute a default arch for plain "i?86"; the assignment body is
    # not visible in this chunk.
    if not arch or re.match(r'i.86', arch):

    # Read the kickstart content; the read into 'ksc' is not visible here.
    with open(ksconf) as f:

    # No macros present: the original path is handed back (early-return
    # body not visible in this chunk).
    if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:

    msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
    ksc = ksc.replace("@ARCH@", arch)
    ksc = ksc.replace("@BUILD_ID@", release)

    # Persist the substituted content in a temp file named after the original.
    fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
    msger.debug('normalized ks file:%s' % ksconf)

    def remove_temp_ks():
        """Best-effort removal of the temp ks file at interpreter exit."""
        except OSError as err:
            msger.warning('Failed to remove temp ks file:%s:%s' % (ksconf, err))

    atexit.register(remove_temp_ks)
def _check_mic_chroot(rootdir):
    """Warn when 'rootdir' does not look like a MeeGo/Tizen chroot env."""
    # Helper mapping a chroot-relative path onto 'rootdir'; its 'def' line
    # is not visible in this chunk.
        return rootdir.rstrip('/') + path

    release_files = map(_path, [ "/etc/moblin-release",
                                 "/etc/meego-release",
                                 "/etc/tizen-release"])

    if not any(map(os.path.exists, release_files)):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find kernel module under %s" % rootdir)
def selinux_check(arch, fstypes):
    """Refuse arm or btrfs image builds while SELinux is enforcing.

    NOTE(review): the try/except around the getenforce lookup is not
    visible in this chunk.
    """
    getenforce = find_binary_path('getenforce')
    selinux_status = runner.outs([getenforce])
    # qemu-arm emulation breaks under enforcing SELinux.
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
    if use_btrfs and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")
def get_image_type(path):
    """Classify the image file (or chroot dir) at 'path' via 'maptab'.

    NOTE(review): the 'maptab' lookup table and several branch bodies are
    not visible in this chunk; indentation below is approximate.
    """
    def _get_extension_name(path):
        # Trailing extension: the text after the last dot, if any.
        match = re.search("(?<=\.)\w+$", path)
        return match.group(0)

    # A directory is treated as a chroot environment; sanity-check it.
    if os.path.isdir(path):
        _check_mic_chroot(path)

    # First try a plain filename-extension lookup.
    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    # Fall back to content sniffing: header bytes, blkid, then file(1).
    fd = open(path, "rb")
    file_header = fd.read(1024)
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:

    #Checking f2fs fs type.
    blkidcmd = find_binary_path("blkid")
    out = runner.outs([blkidcmd, '-o', 'value', '-s', 'TYPE', path])

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    # Most-specific patterns are tested first (usbimg before raw).
    if isoptn.match(output):
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
    elif ext4fsimgptn.match(output):
    elif btrfsimgptn.match(output):
    raise CreatorError("Cannot detect the type of image: %s" % path)
def get_file_size(filename):
    """ Return size in MB unit """
    # First pass: apparent size ('-b'); the 'if rc != 0:' guards before the
    # raises are not visible in this chunk.
    cmd = ['du', "-s", "-b", "-B", "1M", filename]
    rc, duOutput = runner.runtool(cmd)
        raise CreatorError("Failed to run: %s" % ' '.join(cmd))
    size1 = int(duOutput.split()[0])

    # Second pass: on-disk usage (no '-b'); keep the larger of the two so
    # sparse files and tail-packed files are both covered.
    cmd = ['du', "-s", "-B", "1M", filename]
    rc, duOutput = runner.runtool(cmd)
        raise CreatorError("Failed to run: %s" % ' '.join(cmd))
    size2 = int(duOutput.split()[0])
    return max(size1, size2)
def get_filesystem_avail(fs):
    """Return the bytes available to a non-root user on the file system
    that contains 'fs'."""
    stats = os.statvfs(fs)
    return stats.f_bsize * stats.f_bavail
def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    """Convert 'srcimg' (vmdk or vdi) into 'dstimg' of format 'dstfmt'.

    NOTE(review): the destination-format check and the 'if srcfmt == ...'
    branch heads are not visible in this chunk.
    """
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    # vmdk sources go through qemu-img ...
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        # ... while vdi sources are handled by VirtualBox itself.
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
        raise CreatorError("Invalid soure image format: %s" % srcfmt)

    rc = runner.show(argv)
        msger.debug("convert successful")
        raise CreatorError("Unable to convert disk to %s" % dstfmt)
def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress file system from squshfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    args = [ unsquashfs, "-d", outdir, squashfsimg ]
    rc = runner.show(args)
    # Non-zero exit means extraction failed (the 'if rc:' guard line is not
    # visible in this chunk).
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)
def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """ FIXME: use the dir in mic.conf instead """
    # Delegate to the standard library, keeping our project defaults.
    tmp_path = tempfile.mkdtemp(dir = dir, prefix = prefix)
    return tmp_path
def get_repostrs_from_ks(ks):
    """Collect repo settings from a kickstart handler into a list of dicts."""
    def _get_temp_reponame(baseurl):
        # Fabricate a stable repo name from the url when none was given.
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()

    for repodata in ks.handler.repo.repoList:
        # Attribute names copied off the kickstart repo object; the full
        # attribute tuple and the 'repo' dict initialisation are not
        # visible in this chunk.
                  'includepkgs', # val is list
                  'excludepkgs', # val is list
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)
        # Wrap the url so user/password credentials travel with it.
        if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
            repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
                                      getattr(repodata, 'user', None),
                                      getattr(repodata, 'passwd', None))

        kickstart_repos.append(repo)

    return kickstart_repos
def _get_uncompressed_data_from_url(url, filename, proxies):
    """Download 'url' to 'filename' and decompress it when it is .gz/.bz2.

    NOTE(review): the 'suffix' bookkeeping assignments and the final return
    are not visible in this chunk.
    """
    filename = myurlgrab(url.full, filename, proxies)
    if filename.endswith(".gz"):
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        runner.quiet(['bunzip2', "-f", filename])
    # Strip the compression suffix from the reported filename.
    filename = filename.replace(suffix, "")
def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    """Fetch one repodata file into the cache, reusing the cached copy when
    the supplied checksum still matches."""
    url = baseurl.join(filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    # Compressed files will be unpacked, so the cached name drops the suffix.
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        filename = os.path.splitext(filename_tmp)[0]
        # (an 'else:' line from the original is not visible in this chunk)
        filename = filename_tmp
    if sumtype and checksum and os.path.exists(filename):
        # Reuse the local copy when its <sumtype>sum matches; the try/except
        # around the sum-tool lookup is not visible in this chunk.
        sumcmd = find_binary_path("%ssum" % sumtype)
        file_checksum = runner.outs([sumcmd, filename]).split()[0]
        # Early return of the cached file is not visible in this chunk.
        if file_checksum and file_checksum == checksum:
    return _get_uncompressed_data_from_url(url,filename_tmp,proxies)
def get_metadata_from_repos(repos, cachedir):
    """Grab repomd.xml plus primary/patterns/comps metadata for every repo.

    NOTE(review): the per-repo 'for' loop header and a number of statements
    (proxy fallback, dict initialisations, try/except around parsing) are
    not visible in this chunk; indentation below is approximate.
    """
    my_repo_metadata = []
    baseurl = repo.baseurl
    if hasattr(repo, 'proxy'):
    proxy = get_proxy_for(baseurl)
    # urlgrabber wants a {scheme: proxy} mapping.
    proxies = {str(baseurl.split(":")[0]): str(proxy)}

    makedirs(os.path.join(cachedir, reponame))
    url = baseurl.join("repodata/repomd.xml")
    filename = os.path.join(cachedir, reponame, 'repomd.xml')
    repomd = myurlgrab(url.full, filename, proxies)
    root = xmlparse(repomd)
    raise CreatorError("repomd.xml syntax error.")

    # Element tags carry an XML namespace prefix "{...}"; extract it once.
    ns = root.getroot().tag
    ns = ns[0:ns.rindex("}")+1]

    # Locate the "patterns" metadata entry plus its checksum info.
    for elm in root.getiterator("%sdata" % ns):
        if elm.attrib["type"] == "patterns":
            filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
            checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
            sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']

    # Locate the compressed comps ("group_gz") entry.
    for elm in root.getiterator("%sdata" % ns):
        #"group" type has no "open-checksum" filed, remove it.
        if elm.attrib["type"] == "group_gz":
            filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
            checksums['comps'] = elm.find("%sopen-checksum" % ns).text
            sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']

    # Locate the primary metadata (sqlite "primary_db" or plain "primary").
    for elm in root.getiterator("%sdata" % ns):
        if elm.attrib["type"] in ("primary_db", "primary"):
            primary_type = elm.attrib["type"]
            filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
            checksums['primary'] = elm.find("%sopen-checksum" % ns).text
            sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']

    # Download each referenced metadata file (remaining call arguments are
    # not visible in this chunk).
    for item in ("primary", "patterns", "comps"):
        if item not in filepaths:
            filepaths[item] = None
        if not filepaths[item]:
        filepaths[item] = _get_metadata_from_repo(baseurl,

    # Optionally fetch the repo signing key; failures are only logged.
    repokey = _get_metadata_from_repo(baseurl,
                                      "repodata/repomd.xml.key")
    msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

    my_repo_metadata.append({"name":reponame,
                             "primary":filepaths['primary'],
                             "patterns":filepaths['patterns'],
                             "comps":filepaths['comps'],
                             "priority":repo.priority})

    return my_repo_metadata
def get_rpmver_in_repo(repometadata):
    """Return the newest version of the 'rpm' package found in the repos."""
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            # Collect every <version ver=...> of the 'rpm' package; the
            # 'versionlist' initialisation is not visible in this chunk.
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            # Fragment of a multi-line "return newest version" expression;
            # its opening lines are not visible in this chunk.
            key = lambda ver: map(int, ver.split('.')))).next()

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            # sqlite metadata: let the database sort versions descending.
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
def get_arch(repometadata):
    """Return (uniq_arch, archlist): the reduced and the raw set of package
    architectures found in the repo metadata.

    NOTE(review): the 'archlist'/'uniq_arch' initialisations and several
    loop-control statements are not visible in this chunk.
    """
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

    # Reduce 'archlist' to mutually-incompatible arches using the
    # compatibility chains in rpmmisc.archPolicies.
    for i in range(len(archlist)):
        if archlist[i] not in rpmmisc.archPolicies.keys():
        while j < len(uniq_arch):
            # archlist[i] already covered by an arch we kept: skip it.
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
            # archlist[i] supersedes a kept arch: replace it.
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                uniq_arch[j] = archlist[i]
            uniq_arch.remove(uniq_arch[j])
            uniq_arch.append(archlist[i])
    return uniq_arch, archlist
def get_package(pkg, repometadata, arch = None):
    """Download package 'pkg' from the highest-priority repo that offers a
    matching arch; return the local rpm path.

    NOTE(review): the initial arch handling ('if' before the 'elif'), the
    priority/version comparison bookkeeping, and the target_repo selection
    are not visible in this chunk; indentation below is approximate.
    """
    elif arch not in rpmmisc.archPolicies:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg and elm.find("%sarch" % ns).text in arches:
                    # Prefer repos with a numerically smaller priority value.
                    if repo["priority"] != None:
                        tmpprior = int(repo["priority"])
                        if tmpprior < priority:
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                        elif tmpprior > priority:
                    # Same priority: fall back to the newer version-release.
                    version = elm.find("%sversion" % ns)
                    tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                    location = elm.find("%slocation" % ns)
                    pkgpath = "%s" % location.attrib['href']

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            # With an arch filter ...
            sql = 'select version, release, location_href from packages ' \
                  'where name = "%s" and arch IN ("%s")' % \
                  (pkg, '","'.join(arches))
            for row in con.execute(sql):
                tmpver = "%s-%s" % (row[0], row[1])
                pkgpath = "%s" % row[2]
            # ... or without, when no arch was requested.
            sql = 'select version, release, location_href from packages ' \
                  'where name = "%s"' % pkg
            for row in con.execute(sql):
                tmpver = "%s-%s" % (row[0], row[1])
                pkgpath = "%s" % row[2]

    # Download (or reuse) the chosen package in the repo's cache directory.
    makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
    url = target_repo["baseurl"].join(pkgpath)
    filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
    if os.path.exists(filename):
        # Reuse only cached rpms that pass an integrity check.
        ret = rpmmisc.checkRpmIntegrity('rpm', filename)
        msger.warning("package %s is damaged: %s" %
                      (os.path.basename(filename), filename))
    pkg = myurlgrab(url.full, filename, target_repo["proxies"])
def get_source_name(pkg, repometadata):
    """Map binary package string 'pkg' to its source package name.

    NOTE(review): the bodies of the inner helpers and several guards are
    not visible in this chunk.
    """
    def get_bin_name(pkg):
        # Parse "name.arch version-release" (see RPM_RE).
        m = RPM_RE.match(pkg)

    def get_src_name(srpm):
        # Parse "name-version-release.src.rpm" (see SRPM_RE).
        m = SRPM_RE.match(srpm)

    pkg_name = get_bin_name(pkg)

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        # The <format> child carries its own namespace.
                        fmt = elm.find("%sformat" % ns)
                        fns = fmt.getchildren()[0].tag
                        fns = fns[0:fns.rindex("}")+1]
                        pkgpath = fmt.find("%ssourcerpm" % fns).text

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                pkgpath = "%s" % row[2]

    return get_src_name(pkgpath)
def get_pkglist_in_patterns(group, patterns):
    """Return the package names required by pattern 'group' in the
    'patterns' metadata file.

    NOTE(review): the 'pkglist' initialisation, the try/except around
    parsing, and the namespace extraction opening are not visible here.
    """
    root = xmlparse(patterns)
    raise SyntaxError("%s syntax error." % patterns)

    for elm in list(root.getroot()):
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        # Match the requested group by name or summary text.
        if name.text == group or summary.text == group:
            for requires in list(elm):
                if requires.tag.endswith("requires"):
                    # Collect each required package once.
                    for pkg in list(requires):
                        pkgname = pkg.attrib["name"]
                        if pkgname not in pkglist:
                            pkglist.append(pkgname)
def get_pkglist_in_comps(group, comps):
    """Return the package names listed by group 'group' in a comps file.

    NOTE(review): the 'pkglist' initialisation, try/except around parsing,
    and the 'id = elm.find("id")' line are not visible in this chunk.
    """
    root = xmlparse(comps)
    raise SyntaxError("%s syntax error." % comps)

    for elm in root.getiterator("group"):
        name = elm.find("name")
        # Match the requested group by id or display name.
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
            for require in elm.getiterator("packagereq"):
                if require.tag.endswith("packagereq"):
                    pkgname = require.text
                    if pkgname not in pkglist:
                        pkglist.append(pkgname)
def is_statically_linked(binary):
    """True when file(1) reports 'binary' as a statically linked executable."""
    file_report = runner.outs(['file', binary])
    return ", statically linked, " in file_report
def get_qemu_arm_binary(arch):
    """Pick a qemu user-mode emulator for 'arch'.

    Returns a tuple (binary_name, binfmt_misc_node): the emulator name
    under /usr/bin and the binfmt_misc registration node to check.
    Raises CreatorError when no suitable (statically-linked) emulator
    is installed.
    """
    if arch == "aarch64":
        node = "/proc/sys/fs/binfmt_misc/aarch64"
        # Prefer a statically-linked qemu-arm64/qemu-aarch64, then fall
        # back to the distro-provided *-static builds.
        if os.path.exists("/usr/bin/qemu-arm64") and is_statically_linked("/usr/bin/qemu-arm64"):
            arm_binary = "qemu-arm64"
        elif os.path.exists("/usr/bin/qemu-aarch64") and is_statically_linked("/usr/bin/qemu-aarch64"):
            arm_binary = "qemu-aarch64"
        elif os.path.exists("/usr/bin/qemu-arm64-static"):
            arm_binary = "qemu-arm64-static"
        elif os.path.exists("/usr/bin/qemu-aarch64-static"):
            arm_binary = "qemu-aarch64-static"
        else:
            # BUGFIX: the original raise referenced 'arm_binary' before any
            # assignment on this path, which would mask the real error with
            # a NameError; name the candidates explicitly instead.
            raise CreatorError("Please install a statically-linked "
                               "qemu-arm64 or qemu-aarch64")
    elif arch == "mipsel":
        node = "/proc/sys/fs/binfmt_misc/mipsel"
        arm_binary = "qemu-mipsel"
        # BUGFIX: the original passed the literal string "/usr/bin/%s" to
        # is_statically_linked instead of interpolating 'arm_binary', so
        # the static-linkage check always ran on a nonexistent path.
        if not os.path.exists("/usr/bin/%s" % arm_binary) or \
           not is_statically_linked("/usr/bin/%s" % arm_binary):
            arm_binary = "qemu-mipsel-static"
        if not os.path.exists("/usr/bin/%s" % arm_binary):
            raise CreatorError("Please install a statically-linked %s" % arm_binary)
    else:
        node = "/proc/sys/fs/binfmt_misc/arm"
        arm_binary = "qemu-arm"
        if not os.path.exists("/usr/bin/qemu-arm") or not is_statically_linked("/usr/bin/qemu-arm"):
            arm_binary = "qemu-arm-static"
        if not os.path.exists("/usr/bin/%s" % arm_binary):
            raise CreatorError("Please install a statically-linked %s" % arm_binary)

    return (arm_binary, node)
def setup_qemu_emulator(rootdir, arch):
    """Install a qemu user-mode emulator into the chroot 'rootdir' and
    register it with binfmt_misc; return the list of installed emulators.

    NOTE(review): the 'qemu_emulators' list initialisation and a few other
    statements are not visible in this chunk.
    """
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    arm_binary, node = get_qemu_arm_binary(arch)
    qemu_emulator = "/usr/bin/%s" % arm_binary

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)
    qemu_emulators.append(qemu_emulator)

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    # register qemu emulator for interpreting other arch executable file
    if not os.path.exists(node):
        # binfmt magic/mask strings per architecture; the final branch's
        # 'else:' line is not visible in this chunk.
        if arch == "aarch64":
            qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff:%s:\n" % qemu_emulator
        elif arch == "mipsel":
            qemu_arm_string = ":mipsel:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x08\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xfe\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff\\xff:%s:\n" % qemu_emulator
            qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
            fd.write(qemu_arm_string)

    # An emulator is already registered: mirror its interpreter into the
    # chroot instead of re-registering.
    with open(node, "r") as fd:
        for line in fd.readlines():
            if line.startswith("flags:"):
                flags = line[len("flags:"):].strip()
            elif line.startswith("interpreter"):
                interpreter = line[len("interpreter"):].strip()

    if flags == "P" and interpreter.endswith("-binfmt"):
        # copy binfmt wrapper when preserve-argv[0] flag is enabled
        shutil.copy(os.path.realpath(interpreter), rootdir + interpreter)
        qemu_emulators.append(interpreter)
    elif not flags and interpreter != qemu_emulator:
        # create symlink as registered qemu emulator
        os.symlink(qemu_emulator, rootdir + interpreter)
        qemu_emulators.append(interpreter)

    return qemu_emulators
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    """Fetch the source rpms for 'pkgs' into <instroot>/usr/src/SRPMS,
    reusing already-cached .src.rpm files where possible.

    NOTE(review): several initialisations (lpkgs_path, lpkgs_dict,
    srcpkgset, src_pkgs, cached_count) and loop headers are not visible in
    this chunk; indentation below is approximate.
    """
    def get_source_repometadata(repometadata):
        # Source repos are identified by a "-source" name suffix.
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        return src_repometadata

    def get_src_name(srpm):
        # Parse "name-version-release.src.rpm" (see SRPM_RE).
        m = SRPM_RE.match(srpm)

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")

    # Index every cached source rpm by its package name.
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    destdir = instroot+'/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    # Resolve each binary package to its source package name.
    srcpkg_name = get_source_name(_pkg, repometadata)
    srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            # Cache hit: copy the local .src.rpm.
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
            # Cache miss: download from the source repos.
            src_pkg = get_package(pkg, src_repometadata, 'src')
            shutil.copy(src_pkg, destdir)
            src_pkgs.append(src_pkg)
    msger.info("%d source packages gotten from cache" % cached_count)
def strip_end(text, suffix):
    """Return 'text' with 'suffix' removed from its end.

    'text' is returned unchanged when it does not end with 'suffix' or
    when 'suffix' is empty.  (The empty-suffix guard also avoids the
    text[:-0] slice, which would wrongly yield an empty string.)
    """
    if not suffix or not text.endswith(suffix):
        return text
    return text[:-len(suffix)]
def strip_archive_suffix(filename):
    """Return 'filename' with its archive suffix cut off.

    When no known archive suffix matches, a warning is emitted and None
    is returned (implicitly).
    """
    for known_suffix in get_archive_suffixes():
        if filename.endswith(known_suffix):
            return filename[:-len(known_suffix)]
    msger.warning("Not supported archive file format: %s" % filename)