# Copyright (c) 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from __future__ import with_statement

from hashlib import md5

import sqlite3 as sqlite

from xml.etree import cElementTree
xmlparse = cElementTree.parse

from fs_related import *
from rpmmisc import myurlgrab
from proxy import get_proxy_for
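
# Standard-library modules used by the helpers below; the bare project-local
# imports (msger, runner, rpmmisc, errors) assume the same flat module layout
# as the imports above.
import os
import re
import math
import glob
import shutil
import subprocess
import tempfile
import tarfile
import zipfile
import hashlib
import platform
import atexit

import msger
import runner
import rpmmisc
from errors import CreatorError, SquashfsError
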
RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(ver_rel)s"
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")

def get_distro():
    """Detect the Linux distribution, including "meego"
    """
    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    try:
        (dist, ver, id) = platform.linux_distribution(
                              supported_dists=support_dists)
    except:
        (dist, ver, id) = platform.dist(
                              supported_dists=support_dists)

    return (dist, ver, id)

def get_distro_str():
    """Get a composited string for the current Linux distribution
    """
    (dist, ver, id) = get_distro()

    if not dist:
        return 'Unknown Linux Distro'
    else:
        distro_str = ' '.join(map(str.strip, (dist, ver, id)))
        return distro_str.strip()

_LOOP_RULE_PTH = None

def hide_loopdev_presentation():
    udev_rules = "80-prevent-loop-present.rules"
    udev_rules_dir = ['/usr/lib/udev/rules.d/',
                      '/lib/udev/rules.d/',
                      '/etc/udev/rules.d/']

    global _LOOP_RULE_PTH
    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')
        runner.quiet('udevadm trigger')
    except:
        pass

def unhide_loopdev_presentation():
    if not _LOOP_RULE_PTH:
        return

    try:
        os.unlink(_LOOP_RULE_PTH)
        runner.quiet('udevadm trigger')
    except:
        pass

def extract_rpm(rpmfile, targetdir):
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    # cpio extracts into the current working directory, so run it inside targetdir
    olddir = os.getcwd()
    os.chdir(targetdir)

    msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
    p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
    p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (sout, serr) = p2.communicate()
    msger.verbose(sout or serr)

    os.chdir(olddir)

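# Illustrative use (hypothetical paths): unpack an rpm payload without
# installing it:
#   extract_rpm("/tmp/foo-1.0-1.i586.rpm", "/tmp/foo-root")
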
def compressing(fpath, method):
    # external compressors per method; the exact tool names are an assumption
    # (any gzip/bzip2-compatible binary on PATH works here)
    comp_map = {
        "gz": "gzip",
        "bz2": "bzip2"
    }
    if method not in comp_map:
        raise CreatorError("Unsupported compress format: %s, valid values: %s"
                           % (method, ','.join(comp_map.keys())))
    cmd = find_binary_path(comp_map[method])
    rc = runner.show([cmd, "-f", fpath])
    if rc:
        raise CreatorError("Failed to %s file: %s" % (comp_map[method], fpath))

def taring(dstfile, target):
    basen, ext = os.path.splitext(dstfile)
    comp = {".tar": None,
            ".gz": "gz",   # for .tar.gz
            ".bz2": "bz2", # for .tar.bz2
            ".tgz": "gz",
            ".tbz": "bz2"}[ext]

    # specify tarball file path
    if not comp:
        tarpath = dstfile
    elif basen.endswith(".tar"):
        tarpath = basen
    else:
        tarpath = basen + ".tar"
    wf = tarfile.open(tarpath, 'w')

    if os.path.isdir(target):
        for item in os.listdir(target):
            wf.add(os.path.join(target, item), item)
    else:
        wf.add(target, os.path.basename(target))
    wf.close()

    if comp:
        compressing(tarpath, comp)
        # when dstfile ext is ".tgz" or ".tbz", the compressor's output
        # needs a rename to the requested name
        if not basen.endswith(".tar"):
            shutil.move("%s.%s" % (tarpath, comp), dstfile)

def ziping(dstfile, target):
    wf = zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED)
    if os.path.isdir(target):
        for item in os.listdir(target):
            fpath = os.path.join(target, item)
            if not os.path.isfile(fpath):
                continue
            wf.write(fpath, item, zipfile.ZIP_DEFLATED)
    else:
        wf.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED)
    wf.close()

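# Dispatch table consulted by packing() below; the key set is assumed from
# how packing() normalizes extensions (".tar.gz"/".tar.bz2" for compressed
# tarballs, ".zip" for zip archives).
pack_formats = {
    ".tar": taring,
    ".tar.gz": taring,
    ".tar.bz2": taring,
    ".tgz": taring,
    ".tbz": taring,
    ".zip": ziping,
}
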
def packing(dstfile, target):
    (base, ext) = os.path.splitext(dstfile)
    if ext in (".gz", ".bz2") and base.endswith(".tar"):
        ext = ".tar" + ext

    if ext not in pack_formats:
        raise CreatorError("Unsupported pack format: %s, valid values: %s"
                           % (ext, ','.join(pack_formats.keys())))

    func = pack_formats[ext]
    # func should be callable
    func(dstfile, target)

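# Illustrative use (hypothetical paths): pack a build directory into a
# compressed tarball, dispatching to taring() via pack_formats:
#   packing("/tmp/result.tar.gz", "/tmp/build-output")
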
def human_size(size):
    """Return a human readable string for a size in bytes
    """
    if size <= 0:
        return "0M"

    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    expo = int(math.log(size, 1024))
    mant = float(size / math.pow(1024, expo))
    return "{0:.1f}{1:s}".format(mant, measure[expo])

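# For example, human_size(2684354560) returns "2.5G" (1024-based units).
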
def check_space_pre_cp(src, dst):
    """Check whether there is enough free space on dst before a 'cp'-like
    operation; raise CreatorError otherwise.
    """
    srcsize = get_file_size(src) * 1024 * 1024
    freesize = get_filesystem_avail(dst)
    if srcsize > freesize:
        raise CreatorError("space on %s(%s) is not enough for about %s files"
                           % (dst, human_size(freesize), human_size(srcsize)))

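# Example (hypothetical paths): check_space_pre_cp("/srv/rootfs", "/var/tmp")
# raises CreatorError when /var/tmp has less free space than /srv/rootfs needs.
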
def get_md5sum(fpath):
    blksize = 65536 # should be optimized enough

    md5sum = md5()
    with open(fpath, 'rb') as f:
        while True:
            data = f.read(blksize)
            if not data:
                break
            md5sum.update(data)

    return md5sum.hexdigest()

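# get_md5sum(path) yields the same hex digest as the `md5sum` command line
# tool, while reading the file in 64 KiB blocks to keep memory use flat.
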
def normalize_ksfile(ksconf, release, arch):
    def _clrtempks():
        try:
            os.unlink(ksconf)
        except:
            pass

    if not os.path.exists(ksconf):
        return

    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" in ksc or "@BUILD_ID@" in ksc:
        msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
        ksc = ksc.replace("@ARCH@", arch)
        ksc = ksc.replace("@BUILD_ID@", release)
        fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf), dir="/tmp/")
        os.write(fd, ksc)
        os.close(fd)

        msger.debug('new ks path %s' % ksconf)

        atexit.register(_clrtempks)

    return ksconf

def _check_mic_chroot(rootdir):
    def _path(path):
        return rootdir.rstrip('/') + path

    release_files = map(_path, [ "/etc/moblin-release",
                                 "/etc/meego-release",
                                 "/etc/tizen-release"])

    if not any(map(os.path.exists, release_files)):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find kernel module under %s" % rootdir)

    return

def selinux_check(arch, fstypes):
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        return

    selinux_status = runner.outs([getenforce])
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
    if use_btrfs and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")

def get_image_type(path):
    def _get_extension_name(path):
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    # file extension -> image type (mapping assumed from the checks below)
    maptab = {
               "tar": "loop",
               "raw": "raw",
               "vmdk": "vmdk",
               "vdi": "vdi",
               "iso": "livecd",
               "usbimg": "liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    fd = open(path, "rb")
    file_header = fd.read(1024)
    fd.close()
    vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)

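# Examples (hypothetical paths, assuming the extension table above):
#   get_image_type("/srv/chroot/")   -> "fs" after sanity-checking the chroot
#   get_image_type("/tmp/out.iso")   -> "livecd"
#   get_image_type("/tmp/disk.vmdk") -> "vmdk"
# Paths with an unknown extension fall back to probing with `file`.
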
def get_file_size(file):
    """ Return size in MB unit """
    rc, duOutput = runner.runtool(['du', "-s", "-b", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du -s -b -B 1M' on %s" % file)

    size1 = int(duOutput.split()[0])
    rc, duOutput = runner.runtool(['du', "-s", "-B", "1M", file])
    if rc != 0:
        raise CreatorError("Failed to run 'du -s -B 1M' on %s" % file)

    size2 = int(duOutput.split()[0])
    # report the larger of apparent size and on-disk usage
    return max(size1, size2)

def get_filesystem_avail(fs):
    vfstat = os.statvfs(fs)
    return vfstat.f_bavail * vfstat.f_bsize

def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    # convert disk image format
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    if rc != 0:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)

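# Illustrative use (hypothetical paths): convert a VMware disk to a raw image
# via qemu-img:
#   convert_image("/tmp/disk.vmdk", "vmdk", "/tmp/disk.raw", "raw")
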
def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress file system from a squashfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    args = [ unsquashfs, "-d", outdir, squashfsimg ]
    rc = runner.show(args)
    if rc != 0:
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)

def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """ FIXME: use the dir in mic.conf instead """

    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)

def get_repostrs_from_ks(ks):
    def _get_temp_reponame(baseurl):
        md5obj = hashlib.md5(baseurl)
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        # kickstart repo attributes copied over verbatim when present
        # (the exact attribute list here is assumed, not exhaustive)
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'proxy',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',        # int
                     'priority',    # int
                     'save',
                     'ssl_verify',
                     'debuginfo',
                     'source',
                     'gpgkey'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)

        kickstart_repos.append(repo)

    return kickstart_repos

def _get_uncompressed_data_from_url(url, filename, proxies):
    filename = myurlgrab(url, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        filename = filename.replace(suffix, "")
    return filename

def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    url = os.path.join(baseurl, filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        filename = os.path.splitext(filename_tmp)[0]
    else:
        filename = filename_tmp
    if sumtype and checksum and os.path.exists(filename):
        try:
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, filename]).split()[0]

        if file_checksum and file_checksum == checksum:
            # cached copy is still valid, skip the download
            return filename

    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)

def get_metadata_from_repos(repos, cachedir):
    my_repo_metadata = []
    for repo in repos:
        reponame = repo['name']
        baseurl = repo['baseurl']

        if 'proxy' in repo:
            proxy = repo['proxy']
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            proxies = {str(baseurl.split(":")[0]): str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = os.path.join(baseurl, "repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("group_gz", "group"):
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        primary_type = None
        for elm in root.getiterator("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        if not primary_type:
            continue

        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        # fetch the repo signing key if the repo provides one
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name": reponame,
                                 "baseurl": baseurl,
                                 "repomd": repomd,
                                 "primary": filepaths['primary'],
                                 "cachedir": cachedir,
                                 "proxies": proxies,
                                 "patterns": filepaths['patterns'],
                                 "comps": filepaths['comps'],
                                 "repokey": repokey})

    return my_repo_metadata

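# Each entry returned above is a plain dict carrying the keys consumed by the
# helpers below: "name", "baseurl", "repomd", "primary", "cachedir",
# "proxies", "patterns", "comps" and "repokey".
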
def get_rpmver_in_repo(repometadata):
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in elm.getchildren():
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                return reversed(
                         sorted(
                           versionlist,
                           key = lambda ver: map(int, ver.split('.')))).next()

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
                con.close()
                return row[0]

    return None

def get_arch(repometadata):
    archlist = []
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])
            con.close()

    # keep only the "strongest" arch of each compatible family
    uniq_arch = []
    for i in range(len(archlist)):
        if archlist[i] not in rpmmisc.archPolicies.keys():
            continue
        need_append = True
        j = 0
        while j < len(uniq_arch):
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                need_append = False
                break
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                if need_append:
                    uniq_arch[j] = archlist[i]
                    need_append = False
                else:
                    uniq_arch.remove(uniq_arch[j])
                    continue
            j += 1

        if need_append:
            uniq_arch.append(archlist[i])

    return uniq_arch, archlist

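# For example, assuming rpmmisc.archPolicies["i686"] lists "i586" in its
# colon-separated compatibility string, an archlist of ["i586", "i686"]
# collapses to uniq_arch == ["i686"]: the weaker arch is dropped because the
# stronger one's policy already covers it.
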
def get_package(pkg, repometadata, arch = None):
    ver = ""
    target_repo = None
    pkgpath = None

    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg:
                    if elm.find("%sarch" % ns).text in arches:
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                        break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if arch:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()

    if not target_repo:
        return None

    makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
    url = os.path.join(target_repo["baseurl"], pkgpath)
    filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
    if os.path.exists(filename):
        ret = rpmmisc.checkRpmIntegrity('rpm', filename)
        if ret == 0:
            return filename

        msger.warning("package %s is damaged: %s" %
                      (os.path.basename(filename), filename))
        os.unlink(filename)

    pkg = myurlgrab(str(url), filename, target_repo["proxies"])
    return pkg

def get_source_name(pkg, repometadata):

    def get_bin_name(pkg):
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None
    pkgpath = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.getiterator("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt:
                                fns = fmt.getchildren()[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    ver = tmpver
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()

    if target_repo and pkgpath:
        return get_src_name(pkgpath)
    return None

def get_pkglist_in_patterns(group, patterns):
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    for elm in list(root.getroot()):
        ns = elm.tag
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        if name.text == group or summary.text == group:
            break
    else:
        return pkglist

    for requires in list(elm):
        if requires.tag.endswith("requires"):
            break
    else:
        return pkglist

    for pkg in list(requires):
        pkgname = pkg.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist

def get_pkglist_in_comps(group, comps):
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    for elm in root.getiterator("group"):
        id = elm.find("id")
        name = elm.find("name")
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
            break
    else:
        return pkglist

    for require in elm.getiterator("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
            if pkgname not in pkglist:
                pkglist.append(pkgname)

    return pkglist

def is_statically_linked(binary):
    return ", statically linked, " in runner.outs(['file', binary])

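# Example: is_statically_linked("/usr/bin/qemu-arm-static") is True when the
# `file` command reports the binary as ", statically linked, ".
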
def setup_qemu_emulator(rootdir, arch):
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")

    # qemu emulator version check
    armv7_list = [a for a in rpmmisc.archPolicies.keys() if a.startswith('armv7')]
    if arch in armv7_list: # need qemu (>=0.13.0)
        qemuout = runner.outs([qemu_emulator, "-h"])
        m = re.search(r"version\s*([.\d]+)", qemuout)
        if m:
            qemu_version = m.group(1)
            # compare numerically, not lexically
            if map(int, qemu_version.split('.')[:2]) < [0, 13]:
                raise CreatorError("Requires %s version >=0.13 for %s" % (qemu_emulator, arch))
        else:
            msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)

    # disable selinux, it would block the qemu emulator from running
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    node = "/proc/sys/fs/binfmt_misc/arm"
    if is_statically_linked(qemu_emulator) and os.path.exists(node):
        return qemu_emulator

    # unregister it if it has been registered and is a dynamically-linked executable
    if not is_statically_linked(qemu_emulator) and os.path.exists(node):
        qemu_unregister_string = "-1\n"
        fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
        fd.write(qemu_unregister_string)
        fd.close()

    # register the qemu emulator so the kernel can run foreign-arch (ARM ELF) binaries
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
        fd.write(qemu_arm_string)
        fd.close()

    return qemu_emulator

def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):

    def get_source_repometadata(repometadata):
        src_repometadata = []
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages found in the cache" % cached_count)

    return src_pkgs

def strip_end(text, suffix):
    if not text.endswith(suffix):
        return text
    return text[:-len(suffix)]
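
# Example: strip_end("repodata/repomd.xml", ".xml") returns "repodata/repomd";
# if the text does not end with the suffix it is returned unchanged.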