fix runtime error when parsing repomd.xml that contains a "group" type entry
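
Some repositories list the comps data in repomd.xml under a plain "group" <data> entry (in addition to, or instead of, "group_gz"), and that entry may carry only <checksum> without <open-checksum> (see the comment added in get_metadata_from_repos below). Matching "group" as well meant elm.find("%sopen-checksum" % ns) could return None, and reading .text on it raised an AttributeError, which is the likely run error this commit fixes. A minimal reproduction sketch, using an illustrative repomd.xml fragment (not taken from a real repository):

    # Illustrative repomd.xml layout (assumed): the "group" entry carries only
    # <checksum>, while "group_gz" also has <open-checksum>.
    import xml.etree.cElementTree as cElementTree
    from io import BytesIO

    REPOMD = b"""<repomd xmlns="http://linux.duke.edu/metadata/repo">
      <data type="group_gz">
        <location href="repodata/comps.xml.gz"/>
        <open-checksum type="sha256">aaaa</open-checksum>
      </data>
      <data type="group">
        <location href="repodata/comps.xml"/>
        <checksum type="sha256">bbbb</checksum>
      </data>
    </repomd>"""

    root = cElementTree.parse(BytesIO(REPOMD))
    ns = root.getroot().tag
    ns = ns[0:ns.rindex("}") + 1]

    for elm in root.getiterator("%sdata" % ns):
        if elm.attrib["type"] == "group_gz":
            # Safe: <open-checksum> is present for the compressed comps file.
            print(elm.find("%sopen-checksum" % ns).text)
        elif elm.attrib["type"] == "group":
            # The old code matched this entry too; find() returns None here,
            # so reading .text raised AttributeError.
            print(elm.find("%sopen-checksum" % ns) is None)
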
[tools/mic.git] / mic / utils / misc.py
old mode 100644 (file)
new mode 100755 (executable)
index 30022c9..399fd4e
@@ -15,6 +15,7 @@
 # with this program; if not, write to the Free Software Foundation, Inc., 59
 # Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 
+from __future__ import with_statement
 import os
 import sys
 import time
@@ -25,9 +26,8 @@ import glob
 import hashlib
 import subprocess
 import platform
-import rpmmisc
-import hashlib
-import sqlite3 as sqlite
+import traceback
+
 
 try:
     import sqlite3 as sqlite
@@ -40,18 +40,22 @@ except ImportError:
     import cElementTree
 xmlparse = cElementTree.parse
 
-from errors import *
-from fs_related import *
-from grabber import myurlgrab
-from proxy import get_proxy_for
-import runner
-
 from mic import msger
+from mic.archive import get_archive_suffixes
+from mic.utils.errors import CreatorError, SquashfsError
+from mic.utils.fs_related import find_binary_path, makedirs
+from mic.utils.grabber import myurlgrab
+from mic.utils.proxy import get_proxy_for
+from mic.utils import runner
+from mic.utils import rpmmisc
+from mic.utils.safeurl import SafeURL
+
 
 RPM_RE  = re.compile("(.*)\.(.*) (.*)-(.*)")
 RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
 SRPM_RE = re.compile("(.*)-(\d+.*)-(\d+\.\d+).src.rpm")
 
+
 def build_name(kscfg, release=None, prefix = None, suffix = None):
     """Construct and return an image name string.
 
@@ -111,15 +115,21 @@ def get_distro():
 
     return (dist, ver, id)
 
-def get_distro_str():
+def get_hostname():
+    """Get hostname
+    """
+    return platform.node()
+
+def get_hostname_distro_str():
     """Get composited string for current linux distribution
     """
     (dist, ver, id) = get_distro()
+    hostname = get_hostname()
 
     if not dist:
-        return 'Unknown Linux Distro'
+        return "%s(Unknown Linux Distribution)" % hostname
     else:
-        distro_str = ' '.join(map(str.strip, (dist, ver, id)))
+        distro_str = ' '.join(map(str.strip, (hostname, dist, ver, id)))
         return distro_str.strip()
 
 _LOOP_RULE_PTH = None
@@ -150,7 +160,7 @@ def hide_loopdev_presentation():
         pass
 
 def unhide_loopdev_presentation():
-    global _LOOP_RULE_PTH
+    #global _LOOP_RULE_PTH
 
     if not _LOOP_RULE_PTH:
         return
@@ -172,88 +182,12 @@ def extract_rpm(rpmfile, targetdir):
     p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
     p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    p1.stdout.close()
     (sout, serr) = p2.communicate()
     msger.verbose(sout or serr)
 
     os.chdir(olddir)
 
-def compressing(fpath, method):
-    comp_map = {
-        "gz": "gzip",
-        "bz2": "bzip2"
-    }
-    if method not in comp_map:
-        raise CreatorError("Unsupport compress format: %s, valid values: %s"
-                           % (method, ','.join(comp_map.keys())))
-    cmd = find_binary_path(comp_map[method])
-    rc = runner.show([cmd, "-f", fpath])
-    if rc:
-        raise CreatorError("Failed to %s file: %s" % (comp_map[method], fpath))
-
-def taring(dstfile, target):
-    import tarfile
-    basen, ext = os.path.splitext(dstfile)
-    comp = {".tar": None,
-            ".gz": "gz", # for .tar.gz
-            ".bz2": "bz2", # for .tar.bz2
-            ".tgz": "gz",
-            ".tbz": "bz2"}[ext]
-
-    # specify tarball file path
-    if not comp:
-        tarpath = dstfile
-    elif basen.endswith(".tar"):
-        tarpath = basen
-    else:
-        tarpath = basen + ".tar"
-    wf = tarfile.open(tarpath, 'w')
-
-    if os.path.isdir(target):
-        for item in os.listdir(target):
-            wf.add(os.path.join(target, item), item)
-    else:
-        wf.add(target, os.path.basename(target))
-    wf.close()
-
-    if comp:
-        compressing(tarpath, comp)
-        # when dstfile ext is ".tgz" and ".tbz", should rename
-        if not basen.endswith(".tar"):
-            shutil.move("%s.%s" % (tarpath, comp), dstfile)
-
-def ziping(dstfile, target):
-    import zipfile
-    wf = zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED)
-    if os.path.isdir(target):
-        for item in os.listdir(target):
-            fpath = os.path.join(target, item)
-            if not os.path.isfile(fpath):
-                continue
-            wf.write(fpath, item, zipfile.ZIP_DEFLATED)
-    else:
-        wf.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
-    wf.close()
-
-pack_formats = {
-    ".tar": taring,
-    ".tar.gz": taring,
-    ".tar.bz2": taring,
-    ".tgz": taring,
-    ".tbz": taring,
-    ".zip": ziping,
-}
-
-def packing(dstfile, target):
-    (base, ext) = os.path.splitext(dstfile)
-    if ext in (".gz", ".bz2") and base.endswith(".tar"):
-        ext = ".tar" + ext
-    if ext not in pack_formats:
-        raise CreatorError("Unsupport pack format: %s, valid values: %s"
-                           % (ext, ','.join(pack_formats.keys())))
-    func = pack_formats[ext]
-    # func should be callable
-    func(dstfile, target)
-
 def human_size(size):
     """Return human readable string for Bytes size
     """
@@ -266,6 +200,18 @@ def human_size(size):
     mant = float(size/math.pow(1024, expo))
     return "{0:.1f}{1:s}".format(mant, measure[expo])
 
+def get_block_size(file_obj):
+    """ Returns block size for file object 'file_obj'. Errors are indicated by
+    the 'IOError' exception. """
+
+    from fcntl import ioctl
+    import struct
+
+    # Get the block size of the host file-system for the image file by calling
+    # the FIGETBSZ ioctl (number 2).
+    binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
+    return struct.unpack('I', binary_data)[0]
+
 def check_space_pre_cp(src, dst):
     """Check whether disk space is enough before 'cp' like
     operations, else exception will be raised.
@@ -289,7 +235,7 @@ def calc_hashes(file_path, hash_names, start = 0, end = None):
         end = os.path.getsize(file_path)
 
     chunk_size = 65536
-    to_read = end - start;
+    to_read = end - start
     read = 0
 
     hashes = []
@@ -316,15 +262,21 @@ def calc_hashes(file_path, hash_names, start = 0, end = None):
 def get_md5sum(fpath):
     return calc_hashes(fpath, ('md5', ))[0]
 
+def get_sha1sum(fpath):
+    return calc_hashes(fpath, ('sha1', ))[0]
+
+def get_sha256sum(fpath):
+    return calc_hashes(fpath, ('sha256', ))[0]
+
 def normalize_ksfile(ksconf, release, arch):
-    def _clrtempks():
-        try:
-            os.unlink(ksconf)
-        except:
-            pass
+    '''
+    Return the name of a normalized ks file in which macro variables
+    @BUILD_ID@ and @ARCH@ are replaced with real values.
 
-    if not os.path.exists(ksconf):
-        return
+    The original ks file is returned if no special macro is used, otherwise
+    a temp file is created and returned, which will be deleted when the
+    program exits normally.
+    '''
 
     if not release:
         release = "latest"
@@ -334,21 +286,31 @@ def normalize_ksfile(ksconf, release, arch):
     with open(ksconf) as f:
         ksc = f.read()
 
-    if "@ARCH@" in ksc or "@BUILD_ID@" in ksc:
-        msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
-        ksc = ksc.replace("@ARCH@", arch)
-        ksc = ksc.replace("@BUILD_ID@", release)
-        fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
-        os.write(fd, ksc)
-        os.close(fd)
+    if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
+        return ksconf
+
+    msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
+    ksc = ksc.replace("@ARCH@", arch)
+    ksc = ksc.replace("@BUILD_ID@", release)
 
-        msger.debug('new ks path %s' % ksconf)
+    fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
+    os.write(fd, ksc)
+    os.close(fd)
 
-        import atexit
-        atexit.register(_clrtempks)
+    msger.debug('normalized ks file:%s' % ksconf)
+
+    def remove_temp_ks():
+        try:
+            os.unlink(ksconf)
+        except OSError as err:
+            msger.warning('Failed to remove temp ks file:%s:%s' % (ksconf, err))
+
+    import atexit
+    atexit.register(remove_temp_ks)
 
     return ksconf
 
+
 def _check_mic_chroot(rootdir):
     def _path(path):
         return rootdir.rstrip('/') + path
@@ -525,13 +487,17 @@ def get_repostrs_from_ks(ks):
 
         if 'name' not in repo:
             repo['name'] = _get_temp_reponame(repodata.baseurl)
+        if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
+            repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
+                                      getattr(repodata, 'user', None),
+                                      getattr(repodata, 'passwd', None))
 
         kickstart_repos.append(repo)
 
     return kickstart_repos
 
 def _get_uncompressed_data_from_url(url, filename, proxies):
-    filename = myurlgrab(url, filename, proxies)
+    filename = myurlgrab(url.full, filename, proxies)
     suffix = None
     if filename.endswith(".gz"):
         suffix = ".gz"
@@ -545,7 +511,7 @@ def _get_uncompressed_data_from_url(url, filename, proxies):
 
 def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                             sumtype=None, checksum=None):
-    url = os.path.join(baseurl, filename)
+    url = baseurl.join(filename)
     filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
     if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
         filename = os.path.splitext(filename_tmp)[0]
@@ -567,23 +533,22 @@ def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
 def get_metadata_from_repos(repos, cachedir):
     my_repo_metadata = []
     for repo in repos:
-        reponame = repo['name']
-        baseurl  = repo['baseurl']
+        reponame = repo.name
+        baseurl = repo.baseurl
 
-
-        if 'proxy' in repo:
-            proxy = repo['proxy']
+        if hasattr(repo, 'proxy'):
+            proxy = repo.proxy
         else:
             proxy = get_proxy_for(baseurl)
 
         proxies = None
         if proxy:
-            proxies = {str(baseurl.split(":")[0]):str(proxy)}
+            proxies = {str(baseurl.split(":")[0]): str(proxy)}
 
         makedirs(os.path.join(cachedir, reponame))
-        url = os.path.join(baseurl, "repodata/repomd.xml")
+        url = baseurl.join("repodata/repomd.xml")
         filename = os.path.join(cachedir, reponame, 'repomd.xml')
-        repomd = myurlgrab(url, filename, proxies)
+        repomd = myurlgrab(url.full, filename, proxies)
         try:
             root = xmlparse(repomd)
         except SyntaxError:
@@ -604,7 +569,8 @@ def get_metadata_from_repos(repos, cachedir):
                 break
 
         for elm in root.getiterator("%sdata" % ns):
-            if elm.attrib["type"] in ("group_gz", "group"):
+            #"group" type has no "open-checksum" filed, remove it.
+            if elm.attrib["type"] == "group_gz":
                 filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                 checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                 sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
@@ -655,7 +621,8 @@ def get_metadata_from_repos(repos, cachedir):
                                  "proxies":proxies,
                                  "patterns":filepaths['patterns'],
                                  "comps":filepaths['comps'],
-                                 "repokey":repokey})
+                                 "repokey":repokey,
+                                 "priority":repo.priority})
 
     return my_repo_metadata
 
@@ -689,35 +656,8 @@ def get_rpmver_in_repo(repometadata):
     return None
 
 def get_arch(repometadata):
-    def uniqarch(archlist=[]):
-        uniq_arch = []
-        for i in range(len(archlist)):
-            if archlist[i] not in rpmmisc.archPolicies.keys():
-                continue
-            need_append = True
-            j = 0
-            while j < len(uniq_arch):
-                if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
-                    need_append = False
-                    break
-                if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
-                    if need_append:
-                        uniq_arch[j] = archlist[i]
-                        need_append = False
-                    else:
-                        uniq_arch.remove(uniq_arch[j])
-                        continue
-                j += 1
-            if need_append:
-                uniq_arch.append(archlist[i])
-
-        return uniq_arch
-
-
-    ret_uniq_arch = []
-    ret_arch_list = []
+    archlist = []
     for repo in repometadata:
-        archlist = []
         if repo["primary"].endswith(".xml"):
             root = xmlparse(repo["primary"])
             ns = root.getroot().tag
@@ -735,16 +675,32 @@ def get_arch(repometadata):
 
             con.close()
 
-        uniq_arch = uniqarch(archlist)
-        if not ret_uniq_arch and len(uniq_arch) == 1:
-            ret_uniq_arch = uniq_arch
-        ret_arch_list += uniq_arch
+    uniq_arch = []
+    for i in range(len(archlist)):
+        if archlist[i] not in rpmmisc.archPolicies.keys():
+            continue
+        need_append = True
+        j = 0
+        while j < len(uniq_arch):
+            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
+                need_append = False
+                break
+            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
+                if need_append:
+                    uniq_arch[j] = archlist[i]
+                    need_append = False
+                else:
+                    uniq_arch.remove(uniq_arch[j])
+                    continue
+            j += 1
+        if need_append:
+            uniq_arch.append(archlist[i])
 
-    ret_arch_list = uniqarch(ret_arch_list)
-    return ret_uniq_arch, ret_arch_list
+    return uniq_arch, archlist
 
 def get_package(pkg, repometadata, arch = None):
     ver = ""
+    priority = 99
     target_repo = None
     if not arch:
         arches = []
@@ -760,16 +716,25 @@ def get_package(pkg, repometadata, arch = None):
             ns = root.getroot().tag
             ns = ns[0:ns.rindex("}")+1]
             for elm in root.getiterator("%spackage" % ns):
-                if elm.find("%sname" % ns).text == pkg:
-                    if elm.find("%sarch" % ns).text in arches:
-                        version = elm.find("%sversion" % ns)
-                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
-                        if tmpver > ver:
-                            ver = tmpver
+                if elm.find("%sname" % ns).text == pkg and elm.find("%sarch" % ns).text in arches:
+                    if repo["priority"] != None:
+                        tmpprior = int(repo["priority"])
+                        if tmpprior < priority:
+                            priority = tmpprior
                             location = elm.find("%slocation" % ns)
                             pkgpath = "%s" % location.attrib['href']
                             target_repo = repo
-                        break
+                            break
+                        elif tmpprior > priority:
+                            break
+                    version = elm.find("%sversion" % ns)
+                    tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
+                    if tmpver > ver:
+                        ver = tmpver
+                        location = elm.find("%slocation" % ns)
+                        pkgpath = "%s" % location.attrib['href']
+                        target_repo = repo
+                    break
         if repo["primary"].endswith(".sqlite"):
             con = sqlite.connect(repo["primary"])
             if arch:
@@ -796,7 +761,7 @@ def get_package(pkg, repometadata, arch = None):
             con.close()
     if target_repo:
         makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
-        url = os.path.join(target_repo["baseurl"], pkgpath)
+        url = target_repo["baseurl"].join(pkgpath)
         filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
         if os.path.exists(filename):
             ret = rpmmisc.checkRpmIntegrity('rpm', filename)
@@ -807,7 +772,7 @@ def get_package(pkg, repometadata, arch = None):
                           (os.path.basename(filename), filename))
             os.unlink(filename)
 
-        pkg = myurlgrab(str(url), filename, target_repo["proxies"])
+        pkg = myurlgrab(url.full, filename, target_repo["proxies"])
         return pkg
     else:
         return None
@@ -933,7 +898,38 @@ def get_pkglist_in_comps(group, comps):
 def is_statically_linked(binary):
     return ", statically linked, " in runner.outs(['file', binary])
 
+def get_qemu_arm_binary(arch):
+    if arch == "aarch64":
+        node = "/proc/sys/fs/binfmt_misc/aarch64"
+        if os.path.exists("/usr/bin/qemu-arm64") and is_statically_linked("/usr/bin/qemu-arm64"):
+            arm_binary = "qemu-arm64"
+        elif os.path.exists("/usr/bin/qemu-aarch64") and is_statically_linked("/usr/bin/qemu-aarch64"):
+            arm_binary = "qemu-aarch64"
+        elif os.path.exists("/usr/bin/qemu-arm64-static"):
+            arm_binary = "qemu-arm64-static"
+        elif os.path.exists("/usr/bin/qemu-aarch64-static"):
+            arm_binary = "qemu-aarch64-static"
+        else:
+            raise CreatorError("Please install a statically-linked %s" % arm_binary)
+    elif arch == "mipsel":
+        node = "/proc/sys/fs/binfmt_misc/mipsel"
+        arm_binary = "qemu-mipsel"
+        if not os.path.exists("/usr/bin/%s" % arm_binary) or not is_statically_linked("/usr/bin/%s"):
+            arm_binary = "qemu-mipsel-static"
+        if not os.path.exists("/usr/bin/%s" % arm_binary):
+            raise CreatorError("Please install a statically-linked %s" % arm_binary)
+    else:
+        node = "/proc/sys/fs/binfmt_misc/arm"
+        arm_binary = "qemu-arm"
+        if not os.path.exists("/usr/bin/qemu-arm") or not is_statically_linked("/usr/bin/qemu-arm"):
+            arm_binary = "qemu-arm-static"
+        if not os.path.exists("/usr/bin/%s" % arm_binary):
+            raise CreatorError("Please install a statically-linked %s" % arm_binary)
+
+    return (arm_binary, node)
+
 def setup_qemu_emulator(rootdir, arch):
+    qemu_emulators = []
     # mount binfmt_misc if it doesn't exist
     if not os.path.exists("/proc/sys/fs/binfmt_misc"):
         modprobecmd = find_binary_path("modprobe")
@@ -944,50 +940,50 @@ def setup_qemu_emulator(rootdir, arch):
 
     # qemu_emulator is a special case, we can't use find_binary_path
     # qemu emulator should be a statically-linked executable file
-    qemu_emulator = "/usr/bin/qemu-arm"
-    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
-        qemu_emulator = "/usr/bin/qemu-arm-static"
-    if not os.path.exists(qemu_emulator):
-        raise CreatorError("Please install a statically-linked qemu-arm")
-
-    # qemu emulator version check
-    armv7_list = [arch for arch in rpmmisc.archPolicies.keys() if arch.startswith('armv7')]
-    if arch in armv7_list:  # need qemu (>=0.13.0)
-        qemuout = runner.outs([qemu_emulator, "-h"])
-        m = re.search("version\s*([.\d]+)", qemuout)
-        if m:
-            qemu_version = m.group(1)
-            if qemu_version < "0.13":
-                raise CreatorError("Requires %s version >=0.13 for %s" % (qemu_emulator, arch))
-        else:
-            msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)
+    arm_binary, node = get_qemu_arm_binary(arch)
+    qemu_emulator = "/usr/bin/%s" % arm_binary
 
     if not os.path.exists(rootdir + "/usr/bin"):
         makedirs(rootdir + "/usr/bin")
-    shutil.copy(qemu_emulator, rootdir + "/usr/bin/qemu-arm-static")
-    qemu_emulator = "/usr/bin/qemu-arm-static"
+    shutil.copy(qemu_emulator, rootdir + qemu_emulator)
+    qemu_emulators.append(qemu_emulator)
 
     # disable selinux, selinux will block qemu emulator to run
     if os.path.exists("/usr/sbin/setenforce"):
         msger.info('Try to disable selinux')
         runner.show(["/usr/sbin/setenforce", "0"])
 
-    # unregister it if it has been registered and is a dynamically-linked executable
-    node = "/proc/sys/fs/binfmt_misc/arm"
-    if os.path.exists(node):
-        qemu_unregister_string = "-1\n"
-        fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
-        fd.write(qemu_unregister_string)
-        fd.close()
-
     # register qemu emulator for interpreting other arch executable file
     if not os.path.exists(node):
-        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
-        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
-        fd.write(qemu_arm_string)
-        fd.close()
+        if arch == "aarch64":
+            qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff:%s:\n" % qemu_emulator
+        elif arch == "mipsel":
+            qemu_arm_string = ":mipsel:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x08\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xfe\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff\\xff:%s:\n" % qemu_emulator
+        else:
+            qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
 
-    return qemu_emulator
+        with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
+            fd.write(qemu_arm_string)
+    else:
+        flags = ""
+        interpreter = ""
+        with open(node, "r") as fd:
+            for line in fd.readlines():
+                if line.startswith("flags:"):
+                    flags = line[len("flags:"):].strip()
+                elif line.startswith("interpreter"):
+                    interpreter = line[len("interpreter"):].strip()
+
+        if flags == "P" and interpreter.endswith("-binfmt"):
+            # copy binfmt wrapper when preserve-argv[0] flag is enabled
+            shutil.copy(os.path.realpath(interpreter), rootdir + interpreter)
+            qemu_emulators.append(interpreter)
+        elif not flags and interpreter != qemu_emulator:
+            # create symlink as registered qemu emulator
+            os.symlink(qemu_emulator, rootdir + interpreter)
+            qemu_emulators.append(interpreter)
+
+    return qemu_emulators
 
 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
     def get_source_repometadata(repometadata):
@@ -1053,3 +1049,11 @@ def strip_end(text, suffix):
     if not text.endswith(suffix):
         return text
     return text[:-len(suffix)]
+
+def strip_archive_suffix(filename):
+    for suffix in get_archive_suffixes():
+        if filename.endswith(suffix):
+            return filename[:-len(suffix)]
+    else:
+        msger.warning("Not supported archive file format: %s" % filename)
+    return None