4 # Copyright (c) 2011, Novell Inc.
6 # This program is licensed under the BSD license, read LICENSE.BSD
7 # for further information
10 # pysolv a little software installer demoing the sat solver library/bindings
13 # - understands globs for package names / dependencies
14 # - understands .arch suffix
15 # - repository data caching
16 # - on demand loading of secondary repository data
17 # - checksum verification
19 # - installation of commandline packages
21 # things not yet ported:
24 # - fastestmirror implementation
26 # things available in the library but missing from pysolv:
27 # - vendor policy loading
28 # - soft locks file handling
29 # - multi version handling
42 from solv import Pool, Repo, Dataiterator, Job, Solver, Transaction
43 from iniparse import INIConfig
44 from optparse import OptionParser
47 #gc.set_debug(gc.DEBUG_LEAK)
# Compute a cache-validation "cookie" for a file: a SHA-256 checksum seeded
# from the file's stat data (via Chksum.add_stat).  Used to detect whether a
# cached repo is still current.
# NOTE(review): this listing is elided (source line 51 and the return of the
# checksum are not visible here); code kept byte-identical.
49 def calc_cookie_file(filename):
50 chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256)
52 chksum.add_stat(filename)
# Compute a cache-validation cookie from an open file pointer.
# NOTE(review): body elided after the Chksum construction — presumably it
# adds the fp's contents and returns the raw digest; confirm against the
# full source.
55 def calc_cookie_fp(fp):
56 chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256)
# Base class for all repository types.  Subclasses dict so that .repo config
# attributes ('baseurl', 'priority', 'cookie', ...) are stored as items.
# NOTE(review): this listing is heavily elided — many interior lines of every
# method are missing; code below kept byte-identical to the fragment.
60 class repo_generic(dict):
61     def __init__(self, name, type, attribs = {}):
# cachepath: map the repo name (plus optional extension tag) to a file name
# under /var/cache/solv/; leading '.' and '/' are rewritten to '_'.
67 def cachepath(self, ext = None):
68 path = re.sub(r'^\.', '_', self.name)
70 path += "_" + ext + ".solvx"
73 return "/var/cache/solv/" + re.sub(r'[/]', '_', path)
# load (name elided): create the libsolv Repo handle, wire appdata back to
# this object, and decide whether the cached metadata is fresh enough
# (metadata_expire vs. cache file mtime) before refreshing.
76 self.handle = pool.add_repo(self.name)
77 self.handle.appdata = self
78 self.handle.priority = 99 - self['priority']
79 if self['autorefresh']:
83 st = os.stat(self.cachepath())
84 if time.time() - st[ST_MTIME] < self['metadata_expire']:
89 if not dorefresh and self.usecachedrepo(None):
90 print "repo: '%s': cached" % self.name
92 return self.load_if_changed()
# Default hooks; subclasses override these.
94 def load_if_changed(self):
97 def load_ext(repodata):
# setfromurls: pick a mirror from the url list and store it as 'baseurl'.
100 def setfromurls(self, urls):
104 print "[using mirror %s]" % re.sub(r'^(.*?/...*?)/.*$', r'\1', url)
105 self['baseurl'] = url
# setfrommetalink: download and scan a metalink file for a sha256 hash and
# repomd.xml mirror urls; returns the checksum (elided) for repomd.xml.
107 def setfrommetalink(self, metalink):
108 nf = self.download(metalink, False, None)
111 f = os.fdopen(os.dup(solv.xfileno(nf)), 'r')
115 for l in f.readlines():
117 m = re.match(r'^<hash type="sha256">([0-9a-fA-F]{64})</hash>', l)
119 chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256, m.group(1))
120 m = re.match(r'^<url.*>(https?://.+)repodata/repomd.xml</url>', l)
122 urls.append(m.group(1))
124 chksum = None # in case the metalink is about a different file
126 self.setfromurls(urls)
# setfrommirrorlist: download a plain mirror list and collect http(s) urls.
# NOTE(review): visible fragment iterates f.readline() (a single string, so
# per-character) and tests l[0:6] against the 7-char 'http://' — both look
# wrong, but the surrounding lines are elided; verify against full source.
129 def setfrommirrorlist(self, mirrorlist):
130 nf = self.download(mirrorlist, False, None)
133 f = os.fdopen(os.dup(solv.xfileno(nf)), 'r')
136 for l in f.readline():
138 if l[0:6] == 'http://' or l[0:7] == 'https://':
140 self.setfromurls(urls)
# download: fetch 'file' relative to baseurl with curl into a temp file,
# resolving metalink/mirrorlist first if no baseurl is known yet; verifies
# the checksum when one is supplied and marks the repo 'incomplete' on
# failure when markincomplete is set.  Returns a solv file handle.
143 def download(self, file, uncompress, chksum, markincomplete=False):
145 if 'baseurl' not in self:
146 if 'metalink' in self:
147 if file != self['metalink']:
148 metalinkchksum = self.setfrommetalink(self['metalink'])
149 if file == 'repodata/repomd.xml' and metalinkchksum and not chksum:
150 chksum = metalinkchksum
153 elif 'mirrorlist' in self:
154 if file != self['mirrorlist']:
155 self.setfrommirrorlist(self['mirrorlist'])
159 if 'baseurl' not in self:
160 print "%s: no baseurl" % self.name
162 url = re.sub(r'/$', '', self['baseurl']) + '/' + file
163 f = tempfile.TemporaryFile()
164 st = subprocess.call(['curl', '-f', '-s', '-L', url], stdout=f.fileno())
165 if os.lseek(f.fileno(), 0, os.SEEK_CUR) == 0 and (st == 0 or not chksum):
167 os.lseek(f.fileno(), 0, os.SEEK_SET)
169 print "%s: download error %d" % (file, st)
171 self['incomplete'] = True
174 fchksum = solv.Chksum(chksum.type)
176 print "%s: unknown checksum type" % file
178 self['incomplete'] = True
180 fchksum.add_fd(f.fileno())
181 if not fchksum.matches(chksum):
182 print "%s: checksum mismatch" % file
184 self['incomplete'] = True
# Two return paths visible: with and without the file name attached
# (presumably the uncompress flag selects between them — confirm).
187 return solv.xfopen_fd(file, os.dup(f.fileno()))
188 return solv.xfopen_fd("", os.dup(f.fileno()))
# usecachedrepo: try to load the cached .solv(x) file; the last 32 bytes of
# the cache file hold the cookie (and the 32 bytes before that the extension
# cookie for non-system main repos).  Returns truthy on success (elided).
190 def usecachedrepo(self, ext, mark=False):
192 cookie = self['cookie']
194 cookie = self['extcookie']
196 repopath = self.cachepath(ext)
197 f = open(repopath, 'r')
198 f.seek(-32, os.SEEK_END)
200 if len(fcookie) != 32:
202 if cookie and fcookie != cookie:
204 if self.type != 'system' and not ext:
205 f.seek(-32 * 2, os.SEEK_END)
206 fextcookie = f.read(32)
207 if len(fextcookie) != 32:
212 flags = Repo.REPO_USE_LOADING|Repo.REPO_EXTEND_SOLVABLES
214 flags |= Repo.REPO_LOCALPOOL
215 if not self.handle.add_solv(f, flags):
217 if self.type != 'system' and not ext:
218 self['cookie'] = fcookie
219 self['extcookie'] = fextcookie
# 'mark' path: touch the cache file so expiry checks see it as fresh.
221 # no futimes in python?
223 os.utime(repopath, None)
# genextcookie: derive the extension cookie from the main cookie plus the
# cache file's stat data; first byte must never be 0 (C-code compatibility).
230 def genextcookie(self, f):
231 chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256)
232 chksum.add(self['cookie'])
234 stat = os.fstat(f.fileno())
235 chksum.add(str(stat[ST_DEV]))
236 chksum.add(str(stat[ST_INO]))
237 chksum.add(str(stat[ST_SIZE]))
238 chksum.add(str(stat[ST_MTIME]))
239 extcookie = chksum.raw()
240 # compatibility to c code
241 if ord(extcookie[0]) == 0:
242 extcookie[0] = chr(1)
243 self['extcookie'] = extcookie
# writecachedrepo: write the repo (or an extension repodata via 'info') to a
# temp file in /var/cache/solv, append cookie(s), atomically rename into
# place, then re-open the saved file to enable libsolv paging.
245 def writecachedrepo(self, ext, info=None):
247 if not os.path.isdir("/var/cache/solv"):
248 os.mkdir("/var/cache/solv", 0755)
249 (fd, tmpname) = tempfile.mkstemp(prefix='.newsolv-', dir='/var/cache/solv')
251 f = os.fdopen(fd, 'w+')
256 else: # rewrite_repos case
257 self.handle.write_first_repodata(f)
258 if self.type != 'system' and not ext:
259 if 'extcookie' not in self:
261 f.write(self['extcookie'])
263 f.write(self['cookie'])
265 f.write(self['extcookie'])
267 if self.handle.iscontiguous():
268 # switch to saved repo to activate paging and save memory
269 nf = solv.xfopen(tmpname)
273 if not self.handle.add_solv(nf, Repo.SOLV_ADD_NO_STUBS):
274 sys.exit("internal error, cannot reload solv file")
277 # need to extend to repo boundaries, as this is how
278 # info.write() has written the data
279 info.extend_to_repo()
280 # LOCALPOOL does not help as pool already contains all ids
281 info.add_solv(nf, Repo.REPO_EXTEND_SOLVABLES)
283 os.rename(tmpname, self.cachepath(ext))
# updateaddedprovides: merge the pool's computed file-provides id list into
# this repo's first repodata and rewrite the cache if anything was added.
288 def updateaddedprovides(self, addedprovides):
289 if 'incomplete' in self:
291 if 'handle' not in self:
293 if self.handle.isempty():
295 # make sure there's just one real repodata with extensions
296 repodata = self.handle.first_repodata()
299 oldaddedprovides = repodata.lookup_idarray(solv.SOLVID_META, solv.REPOSITORY_ADDEDFILEPROVIDES)
300 if not set(addedprovides) <= set(oldaddedprovides):
301 for id in addedprovides:
302 repodata.add_idarray(solv.SOLVID_META, solv.REPOSITORY_ADDEDFILEPROVIDES, id)
303 repodata.internalize()
304 self.writecachedrepo(None, repodata)
# rpm-md (yum/dnf style) repository: repomd.xml index plus primary /
# updateinfo / filelists / deltainfo payloads.
# NOTE(review): elided listing — code kept byte-identical to the fragment.
306 class repo_repomd(repo_generic):
# load_if_changed: fetch repodata/repomd.xml, cookie it, fall back to the
# cache when unchanged, otherwise parse primary+updateinfo and write cache.
307 def load_if_changed(self):
308 print "rpmmd repo '%s':" % self.name,
310 f = self.download("repodata/repomd.xml", False, None, None)
312 print "no repomd.xml file, skipped"
313 self.handle.free(True)
316 self['cookie'] = calc_cookie_fp(f)
317 if self.usecachedrepo(None, True):
321 self.handle.add_repomdxml(f, 0)
324 (filename, filechksum) = self.find('primary')
326 f = self.download(filename, True, filechksum, True)
328 self.handle.add_rpmmd(f, None, 0)
330 if 'incomplete' in self:
331 return False # hopeless, need good primary
332 (filename, filechksum) = self.find('updateinfo')
334 f = self.download(filename, True, filechksum, True)
336 self.handle.add_updateinfoxml(f, 0)
339 if 'incomplete' not in self:
340 self.writecachedrepo(None)
341 # must be called after writing the repo
342 self.handle.create_stubs()
# find: look up a repomd data section by type and return its (location,
# checksum); warns when a location has no checksum.
345 def find(self, what):
346 di = self.handle.Dataiterator(solv.SOLVID_META, solv.REPOSITORY_REPOMD_TYPE, what, Dataiterator.SEARCH_STRING)
347 di.prepend_keyname(solv.REPOSITORY_REPOMD)
350 filename = d.pool.lookup_str(solv.SOLVID_POS, solv.REPOSITORY_REPOMD_LOCATION)
351 chksum = d.pool.lookup_checksum(solv.SOLVID_POS, solv.REPOSITORY_REPOMD_CHECKSUM)
352 if filename and not chksum:
353 print "no %s file checksum!" % filename
357 return (filename, chksum)
# add_ext: register one on-demand extension (deltainfo/filelists) as an
# external stub repodata so libsolv can load it lazily via load_ext.
360 def add_ext(self, repodata, what, ext):
361 filename, chksum = self.find(what)
362 if not filename and what == 'deltainfo':
363 filename, chksum = self.find('prestodelta')
366 handle = repodata.new_handle()
367 repodata.set_poolstr(handle, solv.REPOSITORY_REPOMD_TYPE, what)
368 repodata.set_str(handle, solv.REPOSITORY_REPOMD_LOCATION, filename)
369 repodata.set_checksum(handle, solv.REPOSITORY_REPOMD_CHECKSUM, chksum)
371 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOSITORY_DELTAINFO)
372 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_FLEXARRAY)
374 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.SOLVABLE_FILELIST)
375 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_DIRSTRARRAY)
376 repodata.add_flexarray(solv.SOLVID_META, solv.REPOSITORY_EXTERNAL, handle)
# (method name elided — presumably add_exts): collect all extensions.
379 repodata = self.handle.add_repodata(0)
380 self.add_ext(repodata, 'deltainfo', 'DL')
381 self.add_ext(repodata, 'filelists', 'FL')
382 repodata.internalize()
# load_ext: stub-load callback; serve from cache or download+parse the
# extension, then cache it.
384 def load_ext(self, repodata):
385 repomdtype = repodata.lookup_str(solv.SOLVID_META, solv.REPOSITORY_REPOMD_TYPE)
386 if repomdtype == 'filelists':
388 elif repomdtype == 'deltainfo':
392 sys.stdout.write("[%s:%s: " % (self.name, ext))
393 if self.usecachedrepo(ext):
394 sys.stdout.write("cached]\n")
397 sys.stdout.write("fetching]\n")
399 filename = repodata.lookup_str(solv.SOLVID_META, solv.REPOSITORY_REPOMD_LOCATION)
400 filechksum = repodata.lookup_checksum(solv.SOLVID_META, solv.REPOSITORY_REPOMD_CHECKSUM)
401 f = self.download(filename, True, filechksum)
405 self.handle.add_rpmmd(f, 'FL', Repo.REPO_USE_LOADING|Repo.REPO_EXTEND_SOLVABLES)
407 self.handle.add_deltainfoxml(f, Repo.REPO_USE_LOADING)
409 self.writecachedrepo(ext, repodata)
# susetags (yast2) repository: 'content' index plus packages* files under
# the descr directory.
# NOTE(review): elided listing — code kept byte-identical to the fragment.
412 class repo_susetags(repo_generic):
# load_if_changed: fetch 'content', cookie it, use cache if unchanged, else
# parse packages(.gz) and packages.en(.gz), then write the cache.
413 def load_if_changed(self):
414 print "susetags repo '%s':" % self.name,
416 f = self.download("content", False, None, None)
418 print "no content file, skipped"
419 self.handle.free(True)
422 self['cookie'] = calc_cookie_fp(f)
423 if self.usecachedrepo(None, True):
427 self.handle.add_content(f, 0)
430 defvendorid = self.handle.lookup_id(solv.SOLVID_META, solv.SUSETAGS_DEFAULTVENDOR)
431 descrdir = self.handle.lookup_str(solv.SOLVID_META, solv.SUSETAGS_DESCRDIR)
433 descrdir = "suse/setup/descr"
434 (filename, filechksum) = self.find('packages.gz')
436 (filename, filechksum) = self.find('packages')
438 f = self.download(descrdir + '/' + filename, True, filechksum, True)
440 self.handle.add_susetags(f, defvendorid, None, Repo.REPO_NO_INTERNALIZE|Repo.SUSETAGS_RECORD_SHARES)
442 (filename, filechksum) = self.find('packages.en.gz')
444 (filename, filechksum) = self.find('packages.en')
446 f = self.download(descrdir + '/' + filename, True, filechksum, True)
448 self.handle.add_susetags(f, defvendorid, None, Repo.REPO_NO_INTERNALIZE|Repo.REPO_REUSE_REPODATA|Repo.REPO_EXTEND_SOLVABLES)
450 self.handle.internalize()
452 if 'incomplete' not in self:
453 self.writecachedrepo(None)
454 # must be called after writing the repo
455 self.handle.create_stubs()
# find: look up a file entry from the 'content' metadata by name; returns
# the name together with its recorded checksum.
458 def find(self, what):
459 di = self.handle.Dataiterator(solv.SOLVID_META, solv.SUSETAGS_FILE_NAME, what, Dataiterator.SEARCH_STRING)
460 di.prepend_keyname(solv.SUSETAGS_FILE)
463 chksum = d.pool.lookup_checksum(solv.SOLVID_POS, solv.SUSETAGS_FILE_CHECKSUM)
464 return (what, chksum)
# add_ext: register one packages.<ext> file as a lazily-loadable external
# repodata, declaring the keys (incl. per-language tags) it will provide.
467 def add_ext(self, repodata, what, ext):
468 (filename, chksum) = self.find(what)
471 handle = repodata.new_handle()
472 repodata.set_str(handle, solv.SUSETAGS_FILE_NAME, filename)
474 repodata.set_checksum(handle, solv.SUSETAGS_FILE_CHECKSUM, chksum)
476 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.SOLVABLE_DISKUSAGE)
477 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_DIRNUMNUMARRAY)
479 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.SOLVABLE_FILELIST)
480 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_DIRSTRARRAY)
482 for langtag, langtagtype in [
483 (solv.SOLVABLE_SUMMARY, solv.REPOKEY_TYPE_STR),
484 (solv.SOLVABLE_DESCRIPTION, solv.REPOKEY_TYPE_STR),
485 (solv.SOLVABLE_EULA, solv.REPOKEY_TYPE_STR),
486 (solv.SOLVABLE_MESSAGEINS, solv.REPOKEY_TYPE_STR),
487 (solv.SOLVABLE_MESSAGEDEL, solv.REPOKEY_TYPE_STR),
488 (solv.SOLVABLE_CATEGORY, solv.REPOKEY_TYPE_ID)
490 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, self.handle.pool.id2langid(langtag, ext, 1))
491 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, langtagtype)
492 repodata.add_flexarray(solv.SOLVID_META, solv.REPOSITORY_EXTERNAL, handle)
# (method name elided — presumably add_exts): walk all packages.* entries
# from 'content' and register each extension.
495 repodata = self.handle.add_repodata(0)
496 di = self.handle.Dataiterator(solv.SOLVID_META, solv.SUSETAGS_FILE_NAME, None, 0)
497 di.prepend_keyname(solv.SUSETAGS_FILE)
499 filename = d.match_str()
502 if filename[0:9] != "packages.":
504 if len(filename) == 11 and filename != "packages.gz":
506 elif filename[11:12] == ".":
512 self.add_ext(repodata, filename, ext)
513 repodata.internalize()
# load_ext: stub-load callback; serve from cache or download+parse the
# packages.<ext> file, then cache it.
515 def load_ext(self, repodata):
516 filename = repodata.lookup_str(solv.SOLVID_META, solv.SUSETAGS_FILE_NAME)
518 sys.stdout.write("[%s:%s: " % (self.name, ext))
519 if self.usecachedrepo(ext):
520 sys.stdout.write("cached]\n")
523 sys.stdout.write("fetching]\n")
525 defvendorid = self.handle.lookup_id(solv.SOLVID_META, solv.SUSETAGS_DEFAULTVENDOR)
526 descrdir = self.handle.lookup_str(solv.SOLVID_META, solv.SUSETAGS_DESCRDIR)
528 descrdir = "suse/setup/descr"
529 filechksum = repodata.lookup_checksum(solv.SOLVID_META, solv.SUSETAGS_FILE_CHECKSUM)
530 f = self.download(descrdir + '/' + filename, True, filechksum)
533 self.handle.add_susetags(f, defvendorid, ext, Repo.REPO_USE_LOADING|Repo.REPO_EXTEND_SOLVABLES)
535 self.writecachedrepo(ext, repodata)
# Fallback repo type for unrecognized .repo 'type=' values: announces the
# skip and does not load anything (return line elided in this listing).
538 class repo_unknown(repo_generic):
539 def load(self, pool):
540 print "unsupported repo '%s': skipped" % self.name
# The installed-packages repo (@System), backed by the rpm database.
# load: create the handle, mark it as pool.installed, cookie the rpmdb file
# and either reuse the cache or re-scan the rpmdb (plus /etc/products.d).
# NOTE(review): elided listing — code kept byte-identical to the fragment.
543 class repo_system(repo_generic):
544 def load(self, pool):
545 self.handle = pool.add_repo(self.name)
546 self.handle.appdata = self
547 pool.installed = self.handle
548 print "rpm database:",
549 self['cookie'] = calc_cookie_file("/var/lib/rpm/Packages")
550 if self.usecachedrepo(None):
554 self.handle.add_products("/etc/products.d", Repo.REPO_NO_INTERNALIZE)
555 self.handle.add_rpmdb(None, Repo.REPO_REUSE_REPODATA)
556 self.writecachedrepo(None)
# Repo holding .rpm files given directly on the command line; packages are
# added to it later by the main script (see the 'install' command handling).
559 class repo_cmdline(repo_generic):
560 def load(self, pool):
561 self.handle = pool.add_repo(self.name)
562 self.handle.appdata = self
# Return whether 'arch' is an architecture name known to the pool (used to
# decide if a trailing '.suffix' on a package argument is an arch filter).
565 def validarch(pool, arch):
568 id = pool.str2id(arch, False)
571 return pool.isknownarch(id)
# Narrow a list of jobs by a relation: rewrite each job's 'what' to a REL id
# (name rel evr) and set the matching SOLVER_SET* flag so the solver treats
# the user-specified part as fixed.  SETEVR vs SETEV depends on whether the
# evr string contains a release ('-').
# NOTE(review): elided listing — loop header and return are not visible.
573 def limitjobs(pool, jobs, flags, evrstr):
575 evr = pool.str2id(evrstr)
578 sel = how & Job.SOLVER_SELECTMASK
579 what = pool.rel2id(j.what, evr, flags)
580 if flags == solv.REL_ARCH:
581 how |= Job.SOLVER_SETARCH
582 elif flags == solv.REL_EQ and sel == Job.SOLVER_SOLVABLE_NAME:
583 if evrstr.find('-') >= 0:
584 how |= Job.SOLVER_SETEVR
586 how |= Job.SOLVER_SETEV
587 njobs.append(pool.Job(how, what))
# Like limitjobs, but if the evr string ends in '.<validarch>' split that
# off first and apply it as an additional REL_ARCH restriction.
590 def limitjobs_evrarch(pool, jobs, flags, evrstr):
591 m = re.match(r'(.+)\.(.+?)$', evrstr)
592 if m and validarch(pool, m.group(2)):
593 jobs = limitjobs(pool, jobs, solv.REL_ARCH, m.group(2))
595 return limitjobs(pool, jobs, flags, evrstr)
# Build jobs from a file-path argument (starts with '/'): search the
# filelists (installed packages only for erase-type commands, judging by the
# two Dataiterator branches — confirm against full source), collect matching
# solvables, and return a ONE_OF or single-solvable job.
597 def mkjobs_filelist(pool, cmd, arg):
598 if re.search(r'[[*?]', arg):
599 type = Dataiterator.SEARCH_GLOB
601 type = Dataiterator.SEARCH_STRING
603 di = pool.installed.Dataiterator(0, solv.SOLVABLE_FILELIST, arg, type | Dataiterator.SEARCH_FILES|Dataiterator.SEARCH_COMPLETE_FILELIST)
605 di = pool.Dataiterator(0, solv.SOLVABLE_FILELIST, arg, type | Dataiterator.SEARCH_FILES|Dataiterator.SEARCH_COMPLETE_FILELIST)
609 if s and s.installable():
611 di.skip_solvable() # one match is enough
613 print "[using file list match for '%s']" % arg
615 return [ pool.Job(Job.SOLVER_SOLVABLE_ONE_OF, pool.towhatprovides(matches)) ]
617 return [ pool.Job(Job.SOLVER_SOLVABLE | Job.SOLVER_NOAUTOSET, matches[0]) ]
# Build jobs from 'name <op> evr' arguments: translate the operator string
# into REL_LT/EQ/GT flags, glob the name, and narrow by the relation; also
# retries with a trailing '.<arch>' split off the name.
620 def mkjobs_rel(pool, cmd, name, rel, evr):
622 if rel.find('<') >= 0: flags |= solv.REL_LT
623 if rel.find('=') >= 0: flags |= solv.REL_EQ
624 if rel.find('>') >= 0: flags |= solv.REL_GT
625 jobs = depglob(pool, name, True, True)
627 return limitjobs(pool, jobs, flags, evr)
628 m = re.match(r'(.+)\.(.+?)$', name)
629 if m and validarch(pool, m.group(2)):
630 jobs = depglob(pool, m.group(1), True, True)
632 jobs = limitjobs(pool, jobs, solv.REL_ARCH, m.group(2))
633 return limitjobs(pool, jobs, flags, evr)
# Build jobs from a plain package argument, trying interpretations in order:
# name, name.arch, name-version, name-version-release (each with optional
# trailing arch handled by limitjobs_evrarch).
636 def mkjobs_nevra(pool, cmd, arg):
637 jobs = depglob(pool, arg, True, True)
640 m = re.match(r'(.+)\.(.+?)$', arg)
641 if m and validarch(pool, m.group(2)):
642 jobs = depglob(pool, m.group(1), True, True)
644 return limitjobs(pool, jobs, solv.REL_ARCH, m.group(2))
645 m = re.match(r'(.+)-(.+?)$', arg)
647 jobs = depglob(pool, m.group(1), True, False)
649 return limitjobs_evrarch(pool, jobs, solv.REL_EQ, m.group(2))
650 m = re.match(r'(.+)-(.+?-.+?)$', arg)
652 jobs = depglob(pool, m.group(1), True, False)
654 return limitjobs_evrarch(pool, jobs, solv.REL_EQ, m.group(2))
# Dispatcher: route an argument to the file-list, relational, or nevra job
# builder depending on its shape.
657 def mkjobs(pool, cmd, arg):
658 if len(arg) and arg[0] == '/':
659 jobs = mkjobs_filelist(pool, cmd, arg)
662 m = re.match(r'(.+?)\s*([<=>]+)\s*(.+?)$', arg)
664 return mkjobs_rel(pool, cmd, m.group(1), m.group(2), m.group(3))
666 return mkjobs_nevra(pool, cmd, arg)
# Resolve a (possibly globbed) name to jobs: exact name match first, then
# capability (provides) match, then glob over names, then glob over
# providing dependencies.  globname/globdep select which matches count.
# NOTE(review): elided listing — several guard/return lines are missing.
668 def depglob(pool, name, globname, globdep):
669 id = pool.str2id(name, False)
672 for s in pool.providers(id):
673 if globname and s.nameid == id:
674 return [ pool.Job(Job.SOLVER_SOLVABLE_NAME, id) ]
677 if globname and globdep:
678 print "[using capability match for '%s']" % name
679 return [ pool.Job(Job.SOLVER_SOLVABLE_PROVIDES, id) ]
680 if not re.search(r'[[*?]', name):
685 for d in pool.Dataiterator(0, solv.SOLVABLE_NAME, name, Dataiterator.SEARCH_GLOB):
688 idmatches[s.nameid] = True
690 return [ pool.Job(Job.SOLVER_SOLVABLE_NAME, id) for id in sorted(idmatches.keys()) ]
692 # try dependency glob
693 idmatches = pool.matchprovidingids(name, Dataiterator.SEARCH_GLOB)
695 print "[using capability match for '%s']" % name
696 return [ pool.Job(Job.SOLVER_SOLVABLE_PROVIDES, id) for id in sorted(idmatches) ]
# Pool load-callback: route a stub repodata back to the owning repo object
# (stored in repo.appdata) so it can fetch/parse the extension on demand.
700 def load_stub(repodata):
701 repo = repodata.repo.appdata
703 return repo.load_ext(repodata)
# ---- main script ----------------------------------------------------------
# Parses the command line, loads repo configs and the rpm database, converts
# arguments to solver jobs, runs the solver, downloads packages (using
# deltarpms when possible) and commits the transaction via rpm.
# NOTE(review): this listing is heavily elided (most control-flow lines are
# missing); code kept byte-identical to the visible fragment.
707 parser = OptionParser(usage="usage: solv.py [options] COMMAND")
708 (options, args) = parser.parse_args()
710 parser.print_help(sys.stderr)
727 # read all repo configs
729 for reposdir in ["/etc/zypp/repos.d"]:
730 if not os.path.isdir(reposdir):
732 for reponame in sorted(glob.glob('%s/*.repo' % reposdir)):
733 cfg = INIConfig(open(reponame))
735 repoattr = {'enabled': 0, 'priority': 99, 'autorefresh': 1, 'type': 'rpm-md', 'metadata_expire': 900}
737 repoattr[k] = cfg[alias][k]
# NOTE(review): .find('/metalink') is truthy for any result except index 0,
# including -1 (not found) — looks like a bug, but surrounding lines are
# elided; verify against the full source.
738 if 'mirrorlist' in repoattr and 'metalink' not in repoattr:
739 if repoattr['mirrorlist'].find('/metalink'):
740 repoattr['metalink'] = repoattr['mirrorlist']
741 del repoattr['mirrorlist']
742 if repoattr['type'] == 'rpm-md':
743 repo = repo_repomd(alias, 'repomd', repoattr)
744 elif repoattr['type'] == 'yast2':
745 repo = repo_susetags(alias, 'susetags', repoattr)
747 repo = repo_unknown(alias, 'unknown', repoattr)
751 pool.setarch(os.uname()[4])
752 pool.set_loadcallback(load_stub)
754 # now load all enabled repos into the pool
755 sysrepo = repo_system('@System', 'system')
758 if int(repo['enabled']):
# 'search' command: case-insensitive substring search over package names.
763 di = pool.Dataiterator(0, solv.SOLVABLE_NAME, args[0], Dataiterator.SEARCH_SUBSTRING|Dataiterator.SEARCH_NOCASE)
765 matches[d.solvid] = True
766 for solvid in sorted(matches.keys()):
767 print " - %s [%s]: %s" % (pool.solvid2str(solvid), pool.solvables[solvid].repo.name, pool.lookup_str(solvid, solv.SOLVABLE_SUMMARY))
# Commandline .rpm arguments get loaded into the @commandline repo.
771 if cmd == 'list' or cmd == 'info' or cmd == 'install':
773 if arg.endswith(".rpm") and os.access(arg, os.R_OK):
775 cmdlinerepo = repo_cmdline('@commandline', 'cmdline')
776 cmdlinerepo.load(pool)
777 cmdlinerepo['packages'] = {}
778 cmdlinerepo['packages'][arg] = cmdlinerepo.handle.add_rpm(arg, Repo.REPO_REUSE_REPODATA|Repo.REPO_NO_INTERNALIZE)
780 cmdlinerepo.handle.internalize()
# Distribute the pool's computed file-provides into all repo caches.
782 addedprovides = pool.addfileprovides_ids()
784 sysrepo.updateaddedprovides(addedprovides)
786 repo.updateaddedprovides(addedprovides)
788 pool.createwhatprovides()
790 # convert arguments into jobs
793 if cmdlinerepo and arg in cmdlinerepo['packages']:
794 jobs.append(pool.Job(Job.SOLVER_SOLVABLE, cmdlinerepo['packages'][arg]))
796 njobs = mkjobs(pool, cmd, arg)
798 print "nothing matches '%s'" % arg
# 'list'/'info' commands: print matched packages; note 'str' shadows the
# builtin here (kept as-is — doc-only edit).
802 if cmd == 'list' or cmd == 'info':
804 print "no package matched."
807 for s in job.solvables():
809 print "Name: %s" % s.str()
810 print "Repo: %s" % s.repo.name
811 print "Summary: %s" % s.lookup_str(solv.SOLVABLE_SUMMARY)
812 str = s.lookup_str(solv.SOLVABLE_URL)
814 print "Url: %s" % str
815 str = s.lookup_str(solv.SOLVABLE_LICENSE)
817 print "License: %s" % str
818 print "Description:\n%s" % s.lookup_str(solv.SOLVABLE_DESCRIPTION)
821 print " - %s [%s]" % (s.str(), s.repo.name)
822 print " %s" % s.lookup_str(solv.SOLVABLE_SUMMARY)
# Solver commands: turn jobs into install/erase/update/dup/verify actions.
825 if cmd == 'install' or cmd == 'erase' or cmd == 'up' or cmd == 'dup' or cmd == 'verify':
827 if cmd == 'up' or cmd == 'verify':
828 jobs = [ pool.Job(Job.SOLVER_SOLVABLE_ALL, 0) ]
832 print "no package matched."
836 # up magic: use install instead of update if no installed package matches
837 if job.how == Job.SOLVER_SOLVABLE_ALL or filter(lambda s: s.isinstalled(), job.solvables()):
838 job.how |= Job.SOLVER_UPDATE
840 job.how |= Job.SOLVER_INSTALL
841 elif cmd == 'install':
842 job.how |= Job.SOLVER_INSTALL
844 job.how |= Job.SOLVER_ERASE
846 job.how |= Job.SOLVER_DISTUPGRADE
847 elif cmd == 'verify':
848 job.how |= Job.SOLVER_VERIFY
850 #pool.set_debuglevel(2)
853 solver = pool.Solver()
854 solver.ignorealreadyrecommended = True
856 solver.allowuninstall = True
857 if cmd == 'dup' and not jobs:
858 solver.distupgrade = True
859 solver.updatesystem = True
860 solver.allowdowngrade = True
861 solver.allowvendorchange = True
862 solver.allowarchchange = True
863 solver.dosplitprovides = True
864 if cmd == 'up' and len(jobs) == 1 and jobs[0].how == (Job.SOLVER_UPDATE | Job.SOLVER_SOLVABLE_ALL):
865 solver.dosplitprovides = True
# Interactive problem-resolution loop: print each problem's rule and the
# possible solutions, let the user pick one, and patch the job list.
866 problems = solver.solve(jobs)
869 for problem in problems:
870 print "Problem %d:" % problem.id
871 r = problem.findproblemrule()
873 print ri.problemstr()
874 solutions = problem.solutions()
875 for solution in solutions:
876 print " Solution %d:" % solution.id
877 elements = solution.elements()
878 for element in elements:
880 if etype == Solver.SOLVER_SOLUTION_JOB:
881 print " - do not ask to", jobs[element.jobidx].str()
882 elif etype == Solver.SOLVER_SOLUTION_INFARCH:
883 if element.solvable.isinstalled():
884 print " - keep %s despite the inferior architecture" % element.solvable.str()
886 print " - install %s despite the inferior architecture" % element.solvable.str()
887 elif etype == Solver.SOLVER_SOLUTION_DISTUPGRADE:
888 if element.solvable.isinstalled():
889 print " - keep obsolete %s" % element.solvable.str()
891 print " - install %s from excluded repository" % element.solvable.str()
892 elif etype == Solver.SOLVER_SOLUTION_REPLACE:
893 illegal = element.illegalreplace()
894 if illegal & solver.POLICY_ILLEGAL_DOWNGRADE:
895 print " - allow downgrade of %s to %s" % (element.solvable.str(), element.replacement.str())
896 if illegal & solver.POLICY_ILLEGAL_ARCHCHANGE:
897 print " - allow architecture change of %s to %s" % (element.solvable.str(), element.replacement.str())
898 if illegal & solver.POLICY_ILLEGAL_VENDORCHANGE:
899 if element.replacement.vendorid:
900 print " - allow vendor change from '%s' (%s) to '%s' (%s)" % (element.solvable.vendor, element.solvable.str(), element.replacement.vendor, element.replacement.str())
902 print " - allow vendor change from '%s' (%s) to no vendor (%s)" % (element.solvable.vendor, element.solvable.str(), element.replacement.str())
904 print " - allow replacement of %s with %s" % (element.solvable.str(), element.replacement.str())
905 elif etype == Solver.SOLVER_SOLUTION_ERASE:
906 print " - allow deinstallation of %s" % element.solvable.str()
# Prompt until the user picks 's' (skip), 'q' (quit), or a solution number.
908 while not (sol == 's' or sol == 'q' or (sol.isdigit() and int(sol) >= 1 and int(sol) <= len(solutions))):
909 sys.stdout.write("Please choose a solution: ")
911 sol = sys.stdin.readline().strip()
913 continue # skip problem
916 solution = solutions[int(sol) - 1]
917 for element in solution.elements():
919 if etype == Solver.SOLVER_SOLUTION_JOB:
920 jobs[element.jobidx] = pool.Job(Job.SOLVER_NOOP, 0)
922 newjob = element.Job()
925 if job.how == newjob.how and job.what == newjob.what:
930 # no problems, show transaction
931 trans = solver.transaction()
934 print "Nothing to do."
937 print "Transaction summary:"
939 for cl in trans.classify():
940 if cl.type == Transaction.SOLVER_TRANSACTION_ERASE:
941 print "%d erased packages:" % cl.count
942 elif cl.type == Transaction.SOLVER_TRANSACTION_INSTALL:
943 print "%d installed packages:" % cl.count
944 elif cl.type == Transaction.SOLVER_TRANSACTION_REINSTALLED:
945 print "%d reinstalled packages:" % cl.count
946 elif cl.type == Transaction.SOLVER_TRANSACTION_DOWNGRADED:
947 print "%d downgraded packages:" % cl.count
948 elif cl.type == Transaction.SOLVER_TRANSACTION_CHANGED:
949 print "%d changed packages:" % cl.count
951 elif cl.type == Transaction.SOLVER_TRANSACTION_UPGRADED:
951 print "%d upgraded packages:" % cl.count
952 elif cl.type == Transaction.SOLVER_TRANSACTION_VENDORCHANGE:
953 print "%d vendor changes from '%s' to '%s':" % (cl.count, pool.id2str(cl.fromid), pool.id2str(cl.toid))
954 elif cl.type == Transaction.SOLVER_TRANSACTION_ARCHCHANGE:
955 print "%d arch changes from '%s' to '%s':" % (cl.count, pool.id2str(cl.fromid), pool.id2str(cl.toid))
958 for p in cl.solvables():
959 if ctype == Transaction.SOLVER_TRANSACTION_UPGRADED or ctype == Transaction.SOLVER_TRANSACTION_DOWNGRADED:
960 op = trans.othersolvable(p)
961 print " - %s -> %s" % (p.str(), op.str())
963 print " - %s" % p.str()
965 print "install size change: %d K" % trans.calc_installsizechange()
# Confirm, then download all new packages (trying applydeltarpm first when
# a matching installed base package and delta entry exist).
970 sys.stdout.write("OK to continue (y/n)? ")
972 yn = sys.stdin.readline().strip()
974 if yn == 'n': sys.exit(1)
975 newpkgs = trans.newpackages()
980 downloadsize += p.lookup_num(solv.SOLVABLE_DOWNLOADSIZE)
981 print "Downloading %d packages, %d K" % (len(newpkgs), downloadsize)
983 repo = p.repo.appdata
984 location, medianr = p.lookup_location()
987 if repo.type == 'commandline':
988 f = solv.xfopen(location)
990 sys.exit("\n%s: %s not found" % location)
993 if not sysrepo.handle.isempty() and os.access('/usr/bin/applydeltarpm', os.X_OK):
995 di = p.repo.Dataiterator(solv.SOLVID_META, solv.DELTA_PACKAGE_NAME, pname, Dataiterator.SEARCH_STRING)
996 di.prepend_keyname(solv.REPOSITORY_DELTAINFO)
999 if pool.lookup_id(solv.SOLVID_POS, solv.DELTA_PACKAGE_EVR) != p.evrid or pool.lookup_id(solv.SOLVID_POS, solv.DELTA_PACKAGE_ARCH) != p.archid:
1001 baseevrid = pool.lookup_id(solv.SOLVID_POS, solv.DELTA_BASE_EVR)
1003 for installedp in pool.providers(p.nameid):
1004 if installedp.isinstalled() and installedp.nameid == p.nameid and installedp.archid == p.archid and installedp.evrid == baseevrid:
1005 candidate = installedp
# applydeltarpm -c -s: check the delta sequence is applicable before
# downloading the delta itself.
1008 seq = pool.lookup_str(solv.SOLVID_POS, solv.DELTA_SEQ_NAME) + '-' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_SEQ_EVR) + '-' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_SEQ_NUM)
1009 st = subprocess.call(['/usr/bin/applydeltarpm', '-a', p.arch, '-c', '-s', seq])
1012 chksum = pool.lookup_checksum(solv.SOLVID_POS, solv.DELTA_CHECKSUM)
1015 dloc = pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_DIR) + '/' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_NAME) + '-' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_EVR) + '.' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_SUFFIX)
1016 f = repo.download(dloc, False, chksum)
1019 nf = tempfile.TemporaryFile()
1020 nf = os.dup(nf.fileno())
1021 st = subprocess.call(['/usr/bin/applydeltarpm', '-a', p.arch, "/dev/fd/%d" % solv.xfileno(f), "/dev/fd/%d" % nf])
1023 os.lseek(nf, 0, os.SEEK_SET)
1024 newpkgsfp[p.id] = solv.xfopen_fd("", nf)
1026 if p.id in newpkgsfp:
1027 sys.stdout.write("d")
# Fallback: full download (susetags repos prefix the datadir).
1031 if repo.type == 'susetags':
1032 datadir = repo.handle.lookup_str(solv.SOLVID_META, solv.SUSETAGS_DATADIR)
1035 location = datadir + '/' + location
1036 chksum = p.lookup_checksum(solv.SOLVABLE_CHECKSUM)
1037 f = repo.download(location, False, chksum)
1039 sys.exit("\n%s: %s not found in repository" % (repo.name, location))
1041 sys.stdout.write(".")
# Commit: build an rpm TransactionSet from the solver transaction steps and
# run it with a callback that feeds package fds / prints progress.
1044 print "Committing transaction:"
1046 ts = rpm.TransactionSet('/')
1047 ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
1048 erasenamehelper = {}
1049 for p in trans.steps():
1050 type = trans.steptype(p, Transaction.SOLVER_TRANSACTION_RPM_ONLY)
1051 if type == Transaction.SOLVER_TRANSACTION_ERASE:
1052 rpmdbid = p.lookup_num(solv.RPM_RPMDBID)
1053 erasenamehelper[p.name] = p
1055 sys.exit("\ninternal error: installed package %s has no rpmdbid\n" % p.str())
1056 ts.addErase(rpmdbid)
1057 elif type == Transaction.SOLVER_TRANSACTION_INSTALL:
1059 h = ts.hdrFromFdno(solv.xfileno(f))
1060 os.lseek(solv.xfileno(f), 0, os.SEEK_SET)
1061 ts.addInstall(h, p, 'u')
1062 elif type == Transaction.SOLVER_TRANSACTION_MULTIINSTALL:
1064 h = ts.hdrFromFdno(solv.xfileno(f))
1065 os.lseek(solv.xfileno(f), 0, os.SEEK_SET)
1066 ts.addInstall(h, p, 'i')
1067 checkproblems = ts.check()
# rpm callback: INST_OPEN_FILE must return the package's file descriptor;
# UNINST_START only gets the package *name*, hence the erasenamehelper map.
1072 def runCallback(reason, amount, total, p, d):
1073 if reason == rpm.RPMCALLBACK_INST_OPEN_FILE:
1074 return solv.xfileno(newpkgsfp[p.id])
1075 if reason == rpm.RPMCALLBACK_INST_START:
1076 print "install", p.str()
1077 if reason == rpm.RPMCALLBACK_UNINST_START:
1078 # argh, p is just the name of the package
1079 if p in erasenamehelper:
1080 p = erasenamehelper[p]
1081 print "erase", p.str()
1082 runproblems = ts.run(runCallback, '')
1088 print "unknown command", cmd