#!/usr/bin/python3 -tt
#
# Copyright (c) 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.


import os
import sys
import time
import tempfile
import re
import shutil
import glob
import hashlib
import subprocess
import platform
import traceback
import distro

try:
    import sqlite3 as sqlite
except ImportError:
    import sqlite

try:
    from xml.etree import cElementTree
except ImportError:
    import cElementTree
xmlparse = cElementTree.parse

from mic import msger
from mic.archive import get_archive_suffixes
from mic.utils.errors import CreatorError, SquashfsError
from mic.utils.fs_related import find_binary_path, makedirs
from mic.utils.grabber import myurlgrab
from mic.utils.proxy import get_proxy_for
from mic.utils import runner
from mic.utils import rpmmisc
from mic.utils.safeurl import SafeURL


RPM_RE  = re.compile(r"(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
SRPM_RE = re.compile(r"(.*)-(\d+.*)-(\d+\.\d+).src.rpm")


def build_name(kscfg, release=None, prefix = None, suffix = None):
    """Construct and return an image name string.

    This is a utility function to help create sensible name and fslabel
    strings. The name is constructed using the sans-prefix-and-extension
    kickstart filename and the supplied prefix and suffix.

    kscfg -- a path to a kickstart file
    release -- an image release string; when given, the timestamp suffix
               is suppressed
    prefix -- a prefix to prepend to the name; defaults to None, which causes
              no prefix to be used
    suffix -- a suffix to append to the name; defaults to None, which causes
              a YYYYMMDDHHMM suffix to be used

    """
    name = os.path.basename(kscfg)
    idx = name.rfind('.')
    if idx >= 0:
        name = name[:idx]

    if release is not None:
        suffix = ""
    if prefix is None:
        prefix = ""
    if suffix is None:
        suffix = time.strftime("%Y%m%d%H%M")

    if name.startswith(prefix):
        name = name[len(prefix):]

    prefix = "%s-" % prefix if prefix else ""
    suffix = "-%s" % suffix if suffix else ""

    ret = prefix + name + suffix
    return ret
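# Usage sketch (illustrative only, not executed; the kickstart path and the
# "tizen" prefix below are hypothetical):
#
#   build_name("/path/to/handset.ks", prefix="tizen")
#   # -> "tizen-handset-<YYYYMMDDHHMM>"   (suffix defaults to a timestamp)
#   build_name("/path/to/handset.ks", release="1.0", prefix="tizen")
#   # -> "tizen-handset"                  (a release suppresses the suffix)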

def get_distro():
    """Detect the Linux distribution; "meego"-like distributions are supported.
    """

    support_dists = ('SuSE',
                     'debian',
                     'fedora',
                     'redhat',
                     'centos',
                     'meego',
                     'moblin',
                     'tizen')
    # platform.linux_distribution was removed in Python 3.8; use
    # distro.linux_distribution(full_distribution_name=False) instead.
    (dist, ver, id) = distro.linux_distribution(full_distribution_name=False)

    return (dist, ver, id)

def get_hostname():
    """Get hostname
    """
    return platform.node()

def get_hostname_distro_str():
    """Get a composited string for the current Linux distribution
    """
    (dist, ver, id) = get_distro()
    hostname = get_hostname()

    if not dist:
        return "%s(Unknown Linux Distribution)" % hostname
    else:
        distro_str = ' '.join(map(str.strip, (hostname, dist, ver, id)))
        return distro_str.strip()

_LOOP_RULE_PTH = None

def hide_loopdev_presentation():
    udev_rules = "80-prevent-loop-present.rules"
    udev_rules_dir = [
                       '/usr/lib/udev/rules.d/',
                       '/lib/udev/rules.d/',
                       '/etc/udev/rules.d/'
                     ]

    global _LOOP_RULE_PTH

    for rdir in udev_rules_dir:
        if os.path.exists(rdir):
            _LOOP_RULE_PTH = os.path.join(rdir, udev_rules)

    if not _LOOP_RULE_PTH:
        return

    try:
        with open(_LOOP_RULE_PTH, 'w') as wf:
            wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')

        runner.quiet('udevadm trigger')
    except:
        pass

def unhide_loopdev_presentation():
    #global _LOOP_RULE_PTH

    if not _LOOP_RULE_PTH:
        return

    try:
        os.unlink(_LOOP_RULE_PTH)
        runner.quiet('udevadm trigger')
    except:
        pass

def extract_rpm(rpmfile, targetdir):
    rpm2cpio = find_binary_path("rpm2cpio")
    cpio = find_binary_path("cpio")

    olddir = os.getcwd()
    os.chdir(targetdir)

    msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
    p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
    p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p1.stdout.close()
    (sout, serr) = p2.communicate()
    msger.verbose(sout.decode() or serr.decode())

    os.chdir(olddir)

def human_size(size):
    """Return human readable string for Bytes size
    """

    if size <= 0:
        return "0M"
    import math
    measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    expo = int(math.log(size, 1024))
    mant = float(size/math.pow(1024, expo))
    return "{0:.1f}{1:s}".format(mant, measure[expo])
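# Usage sketch (illustrative only, not executed):
#
#   human_size(0)                  # -> "0M"   (non-positive sizes collapse to "0M")
#   human_size(1536 * 1024 ** 2)   # -> "1.5G" (1024-based units: B, K, M, G, ...)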

def get_block_size(file_obj):
    """ Returns block size for file object 'file_obj'. Errors are indicated by
    the 'IOError' exception. """

    from fcntl import ioctl
    import struct

    # Get the block size of the host file-system for the image file by calling
    # the FIGETBSZ ioctl (number 2).
    binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
    return struct.unpack('I', binary_data)[0]
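# Usage sketch (illustrative only, not executed; the image path is hypothetical):
#
#   with open("/var/tmp/some.img", "rb") as img:
#       blksz = get_block_size(img)   # typically 4096 on common Linux filesystems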

def check_space_pre_cp(src, dst):
    """Check that there is enough disk space before 'cp'-like
    operations; raise an exception otherwise.
    """

    srcsize  = get_file_size(src) * 1024 * 1024
    freesize = get_filesystem_avail(dst)
    if srcsize > freesize:
        raise CreatorError("space on %s(%s) is not enough for about %s files"
                           % (dst, human_size(freesize), human_size(srcsize)))

def calc_hashes(file_path, hash_names, start = 0, end = None):
    """ Calculate hashes for a file. The 'file_path' argument is the file
    to calculate hash functions for, 'start' and 'end' are the starting and
    ending file offsets to calculate the hash functions for. The 'hash_names'
    argument is a list of hash names to calculate. Returns the list
    of calculated hash values in hexadecimal form, in the same order
    as 'hash_names'.
    """
    if end is None:
        end = os.path.getsize(file_path)

    chunk_size = 65536
    to_read = end - start
    read = 0

    hashes = []
    for hash_name in hash_names:
        hashes.append(hashlib.new(hash_name))

    with open(file_path, "rb") as f:
        f.seek(start)

        while read < to_read:
            if read + chunk_size > to_read:
                chunk_size = to_read - read
            chunk = f.read(chunk_size)
            for hash_obj in hashes:
                hash_obj.update(chunk)
            read += chunk_size

    result = []
    for hash_obj in hashes:
        result.append(hash_obj.hexdigest())

    return result

def get_md5sum(fpath):
    return calc_hashes(fpath, ('md5', ))[0]

def get_sha1sum(fpath):
    return calc_hashes(fpath, ('sha1', ))[0]

def get_sha256sum(fpath):
    return calc_hashes(fpath, ('sha256', ))[0]
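# Usage sketch (illustrative only, not executed; the file path is hypothetical):
#
#   calc_hashes("/var/tmp/blob.img", ("md5", "sha256"), start=0, end=4096)
#   # -> ["<md5 hexdigest>", "<sha256 hexdigest>"]  (hashes of the first 4 KiB)
#   get_sha256sum("/var/tmp/blob.img")   # whole-file shortcut for ("sha256",)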

def normalize_ksfile(ksconf, release, arch):
    '''
    Return the name of a normalized ks file in which the macro variables
    @BUILD_ID@ and @ARCH@ are replaced with real values.

    The original ks file is returned if no special macro is used, otherwise
    a temp file is created and returned, which will be deleted when the
    program exits normally.
    '''

    if not release:
        release = "latest"
    if not arch or re.match(r'i.86', arch):
        arch = "ia32"

    with open(ksconf) as f:
        ksc = f.read()

    if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
        return ksconf

    msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
    ksc = ksc.replace("@ARCH@", arch)
    ksc = ksc.replace("@BUILD_ID@", release)

    fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
    os.write(fd, ksc.encode())
    os.close(fd)

    msger.debug('normalized ks file: %s' % ksconf)

    def remove_temp_ks():
        try:
            os.unlink(ksconf)
        except OSError as err:
            msger.warning('Failed to remove temp ks file %s: %s' % (ksconf, err))

    import atexit
    atexit.register(remove_temp_ks)

    return ksconf
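# Usage sketch (illustrative only, not executed; the path and values are
# hypothetical):
#
#   ks = normalize_ksfile("/path/to/handset.ks", release="tizen-3.0", arch="armv7l")
#   # If the ks file contains @BUILD_ID@ or @ARCH@, 'ks' points to a temp copy
#   # with the macros substituted; otherwise the original path is returned.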


def _check_mic_chroot(rootdir):
    def _path(path):
        return rootdir.rstrip('/') + path

    release_files = list(map(_path, [ "/etc/moblin-release",
                                 "/etc/meego-release",
                                 "/etc/tizen-release"]))

    if not any(map(os.path.exists, release_files)):
        msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)

    if not glob.glob(rootdir + "/boot/vmlinuz-*"):
        msger.warning("Failed to find a kernel image under %s" % rootdir)

    return

def selinux_check(arch, fstypes):
    try:
        getenforce = find_binary_path('getenforce')
    except CreatorError:
        return

    selinux_status = runner.outs([getenforce])
    if arch and arch.startswith("arm") and selinux_status == "Enforcing":
        raise CreatorError("Can't create arm image if selinux is enabled, "
                           "please run 'setenforce 0' to disable selinux")

    use_btrfs = [typ for typ in fstypes if typ == 'btrfs']
    if use_btrfs and selinux_status == "Enforcing":
        raise CreatorError("Can't create btrfs image if selinux is enabled,"
                           " please run 'setenforce 0' to disable selinux")

def get_image_type(path):
    def _get_extension_name(path):
        match = re.search(r"(?<=\.)\w+$", path)
        if match:
            return match.group(0)
        else:
            return None

    if os.path.isdir(path):
        _check_mic_chroot(path)
        return "fs"

    maptab = {
              "tar": "loop",
              "raw": "raw",
              "vmdk": "vmdk",
              "vdi": "vdi",
              "iso": "livecd",
              "usbimg": "liveusb",
             }

    extension = _get_extension_name(path)
    if extension in maptab:
        return maptab[extension]

    with open(path, "rb") as fd:
        file_header = fd.read(1024)
    vdi_flag = b"<<< Sun VirtualBox Disk Image >>>"
    if file_header[0:len(vdi_flag)] == vdi_flag:
        return maptab["vdi"]

    # Check for the f2fs filesystem type.
    blkidcmd = find_binary_path("blkid")
    out = runner.outs([blkidcmd, '-o', 'value', '-s', 'TYPE', path])
    if out == "f2fs":
        return "f2fsimg"
    output = runner.outs(['file', path])
    isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
    usbimgptn = re.compile(r".*x86 boot sector.*active.*")
    rawptn = re.compile(r".*x86 boot sector.*")
    vmdkptn = re.compile(r".*VMware. disk image.*")
    ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
    ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
    btrfsimgptn = re.compile(r".*BTRFS.*")
    if isoptn.match(output):
        return maptab["iso"]
    elif usbimgptn.match(output):
        return maptab["usbimg"]
    elif rawptn.match(output):
        return maptab["raw"]
    elif vmdkptn.match(output):
        return maptab["vmdk"]
    elif ext3fsimgptn.match(output):
        return "ext3fsimg"
    elif ext4fsimgptn.match(output):
        return "ext4fsimg"
    elif btrfsimgptn.match(output):
        return "btrfsimg"
    else:
        raise CreatorError("Cannot detect the type of image: %s" % path)
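# Usage sketch (illustrative only, not executed; paths are hypothetical):
#
#   get_image_type("/srv/images/platform.vmdk")   # -> "vmdk" (by extension)
#   get_image_type("/srv/chroots/tizen-rootfs")   # -> "fs"   (directory)
#   get_image_type("/srv/images/platform.img")    # extension unknown, so the
#                                                 # content is probed with
#                                                 # blkid(8) and file(1)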


def get_file_size(filename):
    """ Return size in MB unit """
    cmd = ['du', "-s", "-b", "-B", "1M", filename]
    rc, duOutput  = runner.runtool(cmd)
    if rc != 0:
        raise CreatorError("Failed to run: %s" % ' '.join(cmd))
    size1 = int(duOutput.split()[0])

    cmd = ['du', "-s", "-B", "1M", filename]
    rc, duOutput = runner.runtool(cmd)
    if rc != 0:
        raise CreatorError("Failed to run: %s" % ' '.join(cmd))

    size2 = int(duOutput.split()[0])
    return max(size1, size2)


def get_filesystem_avail(fs):
    vfstat = os.statvfs(fs)
    return vfstat.f_bavail * vfstat.f_bsize

def convert_image(srcimg, srcfmt, dstimg, dstfmt):
    # convert disk format
    if dstfmt != "raw":
        raise CreatorError("Invalid destination image format: %s" % dstfmt)
    msger.debug("converting %s image to %s" % (srcimg, dstimg))
    if srcfmt == "vmdk":
        path = find_binary_path("qemu-img")
        argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
    elif srcfmt == "vdi":
        path = find_binary_path("VBoxManage")
        argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
    else:
        raise CreatorError("Invalid source image format: %s" % srcfmt)

    rc = runner.show(argv)
    if rc == 0:
        msger.debug("convert successful")
    else:
        raise CreatorError("Unable to convert disk to %s" % dstfmt)
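# Usage sketch (illustrative only, not executed; image names are hypothetical):
#
#   convert_image("disk.vmdk", "vmdk", "disk.raw", "raw")  # via 'qemu-img convert'
#   convert_image("disk.vdi", "vdi", "disk.raw", "raw")    # via 'VBoxManage
#                                                          #   internalcommands converttoraw'
#   # Only "raw" is accepted as the destination format.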

def uncompress_squashfs(squashfsimg, outdir):
    """Uncompress a file system from a squashfs image"""
    unsquashfs = find_binary_path("unsquashfs")
    args = [ unsquashfs, "-d", outdir, squashfsimg ]
    rc = runner.show(args)
    if (rc != 0):
        raise SquashfsError("Failed to uncompress %s." % squashfsimg)

def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
    """ FIXME: use the dir in mic.conf instead """

    makedirs(dir)
    return tempfile.mkdtemp(dir = dir, prefix = prefix)

def get_repostrs_from_ks(ks):
    def _get_temp_reponame(baseurl):
        md5obj = hashlib.md5(baseurl.encode())
        tmpreponame = "%s" % md5obj.hexdigest()
        return tmpreponame

    kickstart_repos = []

    for repodata in ks.handler.repo.repoList:
        repo = {}
        for attr in ('name',
                     'baseurl',
                     'mirrorlist',
                     'includepkgs', # val is list
                     'excludepkgs', # val is list
                     'cost',    # int
                     'priority',# int
                     'save',
                     'proxy',
                     'proxyuser',
                     'proxypasswd',
                     'debuginfo',
                     'source',
                     'gpgkey',
                     'ssl_verify'):
            if hasattr(repodata, attr) and getattr(repodata, attr):
                repo[attr] = getattr(repodata, attr)

        if 'name' not in repo:
            repo['name'] = _get_temp_reponame(repodata.baseurl)
        if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
            repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
                                      getattr(repodata, 'user', None),
                                      getattr(repodata, 'passwd', None))

        kickstart_repos.append(repo)

    return kickstart_repos
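# Each returned entry is a plain dict built from one ks 'repo' line; a sketch
# of the shape (repo name and URL are hypothetical, and keys appear only when
# set in the kickstart file):
#
#   {'name': 'tizen-base',
#    'baseurl': SafeURL('https://download.example.org/snapshots/repos/base'),
#    'priority': 99,
#    'ssl_verify': 'yes'}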

def _get_uncompressed_data_from_url(url, filename, proxies):
    filename = myurlgrab(url.full, filename, proxies)
    suffix = None
    if filename.endswith(".gz"):
        suffix = ".gz"
        runner.quiet(['gunzip', "-f", filename])
    elif filename.endswith(".bz2"):
        suffix = ".bz2"
        runner.quiet(['bunzip2', "-f", filename])
    if suffix:
        filename = filename.replace(suffix, "")
    return filename

def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
                            sumtype=None, checksum=None):
    url = baseurl.join(filename)
    filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
    if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
        filename = os.path.splitext(filename_tmp)[0]
    else:
        filename = filename_tmp
    if sumtype and checksum and os.path.exists(filename):
        try:
            sumcmd = find_binary_path("%ssum" % sumtype)
        except:
            file_checksum = None
        else:
            file_checksum = runner.outs([sumcmd, filename]).split()[0]

        if file_checksum and file_checksum == checksum:
            return filename

    return _get_uncompressed_data_from_url(url, filename_tmp, proxies)

def get_metadata_from_repos(repos, cachedir):
    my_repo_metadata = []
    for repo in repos:
        reponame = repo.name
        baseurl = repo.baseurl

        if hasattr(repo, 'proxy'):
            proxy = repo.proxy
        else:
            proxy = get_proxy_for(baseurl)

        proxies = None
        if proxy:
            proxies = {str(baseurl.split(":")[0]): str(proxy)}

        makedirs(os.path.join(cachedir, reponame))
        url = baseurl.join("repodata/repomd.xml")
        filename = os.path.join(cachedir, reponame, 'repomd.xml')
        repomd = myurlgrab(url.full, filename, proxies)
        try:
            root = xmlparse(repomd)
        except SyntaxError:
            raise CreatorError("repomd.xml syntax error.")

        ns = root.getroot().tag
        ns = ns[0:ns.rindex("}")+1]

        filepaths = {}
        checksums = {}
        sumtypes = {}

        for elm in root.iter("%sdata" % ns):
            if elm.attrib["type"] == "patterns":
                filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
                checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        for elm in root.iter("%sdata" % ns):
            # the "group" type has no "open-checksum" field, so use "group_gz" instead
            if elm.attrib["type"] == "group_gz":
                filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
                checksums['comps'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        primary_type = None
        for elm in root.iter("%sdata" % ns):
            if elm.attrib["type"] in ("primary_db", "primary"):
                primary_type = elm.attrib["type"]
                filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
                checksums['primary'] = elm.find("%sopen-checksum" % ns).text
                sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
                break

        if not primary_type:
            continue

        for item in ("primary", "patterns", "comps"):
            if item not in filepaths:
                filepaths[item] = None
                continue
            if not filepaths[item]:
                continue
            filepaths[item] = _get_metadata_from_repo(baseurl,
                                                      proxies,
                                                      cachedir,
                                                      reponame,
                                                      filepaths[item],
                                                      sumtypes[item],
                                                      checksums[item])

        # Get the repo key
        try:
            repokey = _get_metadata_from_repo(baseurl,
                                              proxies,
                                              cachedir,
                                              reponame,
                                              "repodata/repomd.xml.key")
        except CreatorError:
            repokey = None
            msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))

        my_repo_metadata.append({"name": reponame,
                                 "baseurl": baseurl,
                                 "repomd": repomd,
                                 "primary": filepaths['primary'],
                                 "cachedir": cachedir,
                                 "proxies": proxies,
                                 "patterns": filepaths['patterns'],
                                 "comps": filepaths['comps'],
                                 "repokey": repokey,
                                 "priority": repo.priority})

    return my_repo_metadata
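# Each element of the returned list is a dict describing one repo's cached
# metadata; a sketch of the shape (repo name and paths are hypothetical):
#
#   {'name': 'tizen-base',
#    'baseurl': SafeURL('https://download.example.org/repos/base'),
#    'repomd': '<cachedir>/tizen-base/repomd.xml',
#    'primary': '<cachedir>/tizen-base/<primary file>',   # .xml or .sqlite
#    'patterns': None, 'comps': None,                     # when the repo lacks them
#    'cachedir': '<cachedir>', 'proxies': None,
#    'repokey': None, 'priority': None}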

def get_rpmver_in_repo(repometadata):
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]

            versionlist = []
            for elm in root.iter("%spackage" % ns):
                if elm.find("%sname" % ns).text == 'rpm':
                    for node in list(elm):
                        if node.tag == "%sversion" % ns:
                            versionlist.append(node.attrib['ver'])

            if versionlist:
                return next(reversed(
                         sorted(
                           versionlist,
                           key = lambda ver: list(map(int, ver.split('.'))))))

        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version from packages where "
                                   "name=\"rpm\" ORDER by version DESC"):
                con.close()
                return row[0]

    return None

def get_arch(repometadata):
    archlist = []
    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.iter("%spackage" % ns):
                if elm.find("%sarch" % ns).text not in ("noarch", "src"):
                    arch = elm.find("%sarch" % ns).text
                    if arch not in archlist:
                        archlist.append(arch)
        elif repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
                if row[0] not in archlist:
                    archlist.append(row[0])

            con.close()

    uniq_arch = []
    for i in range(len(archlist)):
        if archlist[i] not in list(rpmmisc.archPolicies.keys()):
            continue
        need_append = True
        j = 0
        while j < len(uniq_arch):
            if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
                need_append = False
                break
            if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
                if need_append:
                    uniq_arch[j] = archlist[i]
                    need_append = False
                else:
                    uniq_arch.remove(uniq_arch[j])
                    continue
            j += 1
        if need_append:
            uniq_arch.append(archlist[i])

    return uniq_arch, archlist

def get_package(pkg, repometadata, arch = None):
    ver = ""
    priority = 99
    target_repo = None
    if not arch:
        arches = []
    elif arch not in rpmmisc.archPolicies:
        arches = [arch]
    else:
        arches = rpmmisc.archPolicies[arch].split(':')
        arches.append('noarch')

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.iter("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg and elm.find("%sarch" % ns).text in arches:
                    if repo["priority"] is not None:
                        tmpprior = int(repo["priority"])
                        if tmpprior < priority:
                            priority = tmpprior
                            location = elm.find("%slocation" % ns)
                            pkgpath = "%s" % location.attrib['href']
                            target_repo = repo
                            break
                        elif tmpprior > priority:
                            break
                    version = elm.find("%sversion" % ns)
                    tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                    if tmpver > ver:
                        ver = tmpver
                        location = elm.find("%slocation" % ns)
                        pkgpath = "%s" % location.attrib['href']
                        target_repo = repo
                    break
        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            if arch:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s" and arch IN ("%s")' % \
                      (pkg, '","'.join(arches))
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            else:
                sql = 'select version, release, location_href from packages ' \
                      'where name = "%s"' % pkg
                for row in con.execute(sql):
                    tmpver = "%s-%s" % (row[0], row[1])
                    if tmpver > ver:
                        ver = tmpver
                        pkgpath = "%s" % row[2]
                        target_repo = repo
                    break
            con.close()
    if target_repo:
        makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
        url = target_repo["baseurl"].join(pkgpath)
        filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
        if os.path.exists(filename):
            ret = rpmmisc.checkRpmIntegrity('rpm', filename)
            if ret == 0:
                return filename

            msger.warning("package %s is damaged: %s" %
                          (os.path.basename(filename), filename))
            os.unlink(filename)

        pkg = myurlgrab(url.full, filename, target_repo["proxies"])
        return pkg
    else:
        return None

def get_source_name(pkg, repometadata):

    def get_bin_name(pkg):
        m = RPM_RE.match(pkg)
        if m:
            return m.group(1)
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    ver = ""
    target_repo = None

    pkg_name = get_bin_name(pkg)
    if not pkg_name:
        return None

    for repo in repometadata:
        if repo["primary"].endswith(".xml"):
            root = xmlparse(repo["primary"])
            ns = root.getroot().tag
            ns = ns[0:ns.rindex("}")+1]
            for elm in root.iter("%spackage" % ns):
                if elm.find("%sname" % ns).text == pkg_name:
                    if elm.find("%sarch" % ns).text != "src":
                        version = elm.find("%sversion" % ns)
                        tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
                        if tmpver > ver:
                            ver = tmpver
                            fmt = elm.find("%sformat" % ns)
                            if fmt is not None:
                                fns = list(fmt)[0].tag
                                fns = fns[0:fns.rindex("}")+1]
                                pkgpath = fmt.find("%ssourcerpm" % fns).text
                                target_repo = repo
                        break

        if repo["primary"].endswith(".sqlite"):
            con = sqlite.connect(repo["primary"])
            for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
                tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    pkgpath = "%s" % row[2]
                    target_repo = repo
                break
            con.close()
    if target_repo:
        return get_src_name(pkgpath)
    else:
        return None

def get_pkglist_in_patterns(group, patterns):
    found = False
    pkglist = []
    try:
        root = xmlparse(patterns)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % patterns)

    for elm in list(root.getroot()):
        ns = elm.tag
        ns = ns[0:ns.rindex("}")+1]
        name = elm.find("%sname" % ns)
        summary = elm.find("%ssummary" % ns)
        if name.text == group or summary.text == group:
            found = True
            break

    if not found:
        return pkglist

    found = False
    for requires in list(elm):
        if requires.tag.endswith("requires"):
            found = True
            break

    if not found:
        return pkglist

    for pkg in list(requires):
        pkgname = pkg.attrib["name"]
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist

def get_pkglist_in_comps(group, comps):
    found = False
    pkglist = []
    try:
        root = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    for elm in root.iter("group"):
        id = elm.find("id")
        name = elm.find("name")
        if id.text == group or name.text == group:
            packagelist = elm.find("packagelist")
            found = True
            break

    if not found:
        return pkglist

    for require in elm.iter("packagereq"):
        if require.tag.endswith("packagereq"):
            pkgname = require.text
        if pkgname not in pkglist:
            pkglist.append(pkgname)

    return pkglist

def is_statically_linked(binary):
    return ", statically linked, " in runner.outs(['file', binary])

def get_qemu_arm_binary(arch):
    if arch == "aarch64":
        node = "/proc/sys/fs/binfmt_misc/aarch64"
        if os.path.exists("/usr/bin/qemu-arm64") and is_statically_linked("/usr/bin/qemu-arm64"):
            arm_binary = "qemu-arm64"
        elif os.path.exists("/usr/bin/qemu-aarch64") and is_statically_linked("/usr/bin/qemu-aarch64"):
            arm_binary = "qemu-aarch64"
        elif os.path.exists("/usr/bin/qemu-arm64-static"):
            arm_binary = "qemu-arm64-static"
        elif os.path.exists("/usr/bin/qemu-aarch64-static"):
            arm_binary = "qemu-aarch64-static"
        else:
            raise CreatorError("Please install a statically-linked qemu-arm64 or qemu-aarch64")
    elif arch == "mipsel":
        node = "/proc/sys/fs/binfmt_misc/mipsel"
        arm_binary = "qemu-mipsel"
        if not os.path.exists("/usr/bin/%s" % arm_binary) or not is_statically_linked("/usr/bin/%s" % arm_binary):
            arm_binary = "qemu-mipsel-static"
        if not os.path.exists("/usr/bin/%s" % arm_binary):
            raise CreatorError("Please install a statically-linked %s" % arm_binary)
    elif arch == "riscv64":
        node = "/proc/sys/fs/binfmt_misc/riscv64"
        arm_binary = "qemu-riscv64"
        if os.path.exists("/usr/bin/qemu-riscv64") and is_statically_linked("/usr/bin/qemu-riscv64"):
            arm_binary = "qemu-riscv64"
        elif os.path.exists("/usr/bin/qemu-riscv64-static"):
            arm_binary = "qemu-riscv64-static"
        else:
            raise CreatorError("Please install a statically-linked %s" % arm_binary)
    else:
        node = "/proc/sys/fs/binfmt_misc/arm"
        arm_binary = "qemu-arm"
        if not os.path.exists("/usr/bin/qemu-arm") or not is_statically_linked("/usr/bin/qemu-arm"):
            arm_binary = "qemu-arm-static"
        if not os.path.exists("/usr/bin/%s" % arm_binary):
            raise CreatorError("Please install a statically-linked %s" % arm_binary)

    return (arm_binary, node)
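# Usage sketch (illustrative only, not executed; the result depends on which
# qemu user-mode binaries are installed on the build host):
#
#   arm_binary, node = get_qemu_arm_binary("aarch64")
#   # e.g. ("qemu-aarch64-static", "/proc/sys/fs/binfmt_misc/aarch64")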

def setup_qemu_emulator(rootdir, arch):
    qemu_emulators = []
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    try:
        arm_binary, node = get_qemu_arm_binary(arch)
    except CreatorError as err:
        msger.error(err)
    qemu_emulator = "/usr/bin/%s" % arm_binary

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + qemu_emulator)
    qemu_emulators.append(qemu_emulator)

    # disable selinux, which would block the qemu emulator from running
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    # register the qemu emulator for interpreting executables of other architectures
    if not os.path.exists(node):
        if arch == "aarch64":
            qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff:%s:\n" % qemu_emulator
        elif arch == "mipsel":
            qemu_arm_string = ":mipsel:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x08\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xfe\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff\\xff:%s:\n" % qemu_emulator
        elif arch == "riscv64":
            qemu_arm_string = ":riscv64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xf3\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff\\xff:%s:\n" % qemu_emulator
        else:
            qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator

        with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
            fd.write(qemu_arm_string)
    else:
        flags = ""
        interpreter = ""
        with open(node, "r") as fd:
            for line in fd.readlines():
                if line.startswith("flags:"):
                    flags = line[len("flags:"):].strip()
                elif line.startswith("interpreter"):
                    interpreter = line[len("interpreter"):].strip()

        if flags == "P" and interpreter.endswith("-binfmt"):
            # copy binfmt wrapper when preserve-argv[0] flag is enabled
            shutil.copy(os.path.realpath(interpreter), rootdir + interpreter)
            qemu_emulators.append(interpreter)
        elif not flags and interpreter != qemu_emulator:
            # create symlink as registered qemu emulator
            os.symlink(qemu_emulator, rootdir + interpreter)
            qemu_emulators.append(interpreter)

    return qemu_emulators
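# A note on the registration strings above: binfmt_misc entries use the format
#   :name:type:offset:magic:mask:interpreter:flags
# where type "M" means "match by magic bytes". The magic/mask pairs above match
# the ELF header of the foreign architecture (\x7fELF, the ELF class byte and
# the e_machine field, e.g. 0xb7 for aarch64), so the kernel hands any such
# binary inside the chroot to the registered qemu user-mode emulator.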

def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    def get_source_repometadata(repometadata):
        src_repometadata = []
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" % (cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = list(lpkgs_dict.keys())

    cached_count = 0
    destdir = instroot + '/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages found in cache" % cached_count)

    return src_pkgs

def strip_end(text, suffix):
    if not text.endswith(suffix):
        return text
    return text[:-len(suffix)]

def strip_archive_suffix(filename):
    for suffix in get_archive_suffixes():
        if filename.endswith(suffix):
            return filename[:-len(suffix)]
    msger.warning("Unsupported archive file format: %s" % filename)
    return None
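# Usage sketch (illustrative only, not executed):
#
#   strip_end("image.tar.gz", ".gz")        # -> "image.tar"
#   strip_end("image.raw", ".gz")           # -> "image.raw" (suffix absent, unchanged)
#   strip_archive_suffix("rootfs.tar.bz2")  # -> "rootfs.tar" when mic.archive
#                                           #    recognizes ".tar.bz2"; otherwise
#                                           #    None plus a warning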