4 # Copyright (c) 2011, Novell Inc.
6 # This program is licensed under the BSD license, read LICENSE.BSD
7 # for further information
10 # pysolv a little software installer demoing the sat solver library/bindings
13 # - understands globs for package names / dependencies
14 # - understands .arch suffix
15 # - repository data caching
16 # - on demand loading of secondary repository data
17 # - checksum verification
19 # - installation of commandline packages
21 # things not yet ported:
24 # - fastestmirror implementation
26 # things available in the library but missing from pysolv:
27 # - vendor policy loading
28 # - soft locks file handling
29 # - multi version handling
41 from solv import Pool, Repo, Dataiterator, Job, Solver, Transaction
42 from iniparse import INIConfig
43 from optparse import OptionParser
46 #gc.set_debug(gc.DEBUG_LEAK)
# Compute a cache-validation "cookie" for a file path from its stat data
# (SHA256 over the stat fields, via solv.Chksum.add_stat).
# NOTE(review): the embedded line numbering jumps (49 -> 51), so at least one
# line is missing from this view -- presumably the checksum is finalized and
# returned; confirm against the upstream pysolv source.
48 def calc_cookie_file(filename):
49     chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256)
51     chksum.add_stat(filename)
# Compute a cache-validation "cookie" from an open file object.
# NOTE(review): truncated -- only the Chksum construction is visible; the
# lines that feed the file contents into the checksum and return it are
# not shown in this chunk.
54 def calc_cookie_fp(fp):
55     chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256)
# Base class for all repository kinds.  Subclasses dict: the instance itself
# holds the repo's configuration attributes ('baseurl', 'priority',
# 'autorefresh', ...) plus runtime state ('cookie', 'extcookie',
# 'incomplete', 'handle').
# NOTE(review): this chunk has gaps in the embedded line numbering throughout,
# so most methods below are missing statements (returns, loop/if bodies,
# else-branches).  Comments describe only what the visible lines establish.
59 class repo_generic(dict):
60     def __init__(self, name, type, attribs = {}):
# NOTE(review): mutable default argument `attribs = {}` -- shared across
# calls if ever mutated; upstream appears to copy it, but the body is not
# visible here.  Flagged for confirmation.
# cachepath: map the repo name to its on-disk cache file under
# /var/cache/solv, with a "_<ext>.solvx" suffix for extension data.
66     def cachepath(self, ext = None):
# leading '.' is replaced so hidden files are never created
67         path = re.sub(r'^\.', '_', self.name)
69         path += "_" + ext + ".solvx"
72         return "/var/cache/solv/" + re.sub(r'[/]', '_', path)
# load: create the libsolv Repo handle, link it back to this object via
# appdata, and decide (based on autorefresh + cache file mtime vs
# metadata_expire) whether the cached repo can be used or a refresh via
# load_if_changed() is needed.
# NOTE(review): the enclosing `def load(self, pool):` line itself is not
# visible in this chunk (numbering jumps 72 -> 75).
75         self.handle = pool.add_repo(self.name)
76         self.handle.appdata = self
# libsolv priority is inverted relative to the .repo file convention
77         self.handle.priority = 99 - self['priority']
78         if self['autorefresh']:
82             st = os.stat(self.cachepath())
# cache still fresh -> no refresh needed
83             if time.time() - st[ST_MTIME] < self['metadata_expire']:
88         if not dorefresh and self.usecachedrepo(None):
89             print "repo: '%s': cached" % self.name
91         return self.load_if_changed()
# default implementation; overridden by repo_repomd / repo_susetags
93     def load_if_changed(self):
# default extension loader stub; overridden by subclasses
96     def load_ext(repodata):
# setfromurls: pick a mirror from a url list and store it as 'baseurl'.
99     def setfromurls(self, urls):
# print only the scheme+host part of the chosen mirror
103         print "[using mirror %s]" % re.sub(r'^(.*?/...*?)/.*$', r'\1', url)
104         self['baseurl'] = url
# setfrommetalink: download and parse a metalink file, extracting the
# repomd.xml sha256 checksum and the mirror base urls.
106     def setfrommetalink(self, metalink):
107         nf = self.download(metalink, False, None)
# duplicate the fd so closing the python file object does not close nf
110         f = os.fdopen(os.dup(solv.xfileno(nf)), 'r')
114         for l in f.readlines():
116             m = re.match(r'^<hash type="sha256">([0-9a-fA-F]{64})</hash>', l)
118                 chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256, m.group(1))
119             m = re.match(r'^<url.*>(https?://.+)repodata/repomd.xml</url>', l)
121                 urls.append(m.group(1))
123             chksum = None # in case the metalink is about a different file
125         self.setfromurls(urls)
# setfrommirrorlist: download a plain mirrorlist (one url per line).
128     def setfrommirrorlist(self, mirrorlist):
129         nf = self.download(mirrorlist, False, None)
132         f = os.fdopen(os.dup(solv.xfileno(nf)), 'r')
# NOTE(review): `f.readline()` iterates the CHARACTERS of the first line,
# not the lines of the file -- looks like a bug for `f.readlines()`.
135         for l in f.readline():
# NOTE(review): l[0:6] is only 6 chars, so it can never equal the
# 7-char 'http://'; slice lengths look off by one vs the literals.
137             if l[0:6] == 'http://' or l[0:7] == 'https://':
139         self.setfromurls(urls)
# download: fetch `file` relative to baseurl via curl into a temp file,
# optionally verify `chksum`, and return it wrapped as a solv file object.
# Resolves baseurl lazily from metalink/mirrorlist on first use.
# On failure returns (presumably) None and, when markincomplete is set,
# flags the repo as 'incomplete' so its cache is not written.
142     def download(self, file, uncompress, chksum, markincomplete=False):
144         if 'baseurl' not in self:
145             if 'metalink' in self:
146                 if file != self['metalink']:
147                     metalinkchksum = self.setfrommetalink(self['metalink'])
# the metalink checksum only applies to repomd.xml itself
148                     if file == 'repodata/repomd.xml' and metalinkchksum and not chksum:
149                         chksum = metalinkchksum
152             elif 'mirrorlist' in self:
153                 if file != self['mirrorlist']:
154                     self.setfrommirrorlist(self['mirrorlist'])
158         if 'baseurl' not in self:
159             print "%s: no baseurl" % self.name
161         url = re.sub(r'/$', '', self['baseurl']) + '/' + file
162         f = tempfile.TemporaryFile()
163         st = subprocess.call(['curl', '-f', '-s', '-L', url], stdout=f.fileno())
# empty download: acceptable only if curl succeeded or no checksum expected
164         if os.lseek(f.fileno(), 0, os.SEEK_CUR) == 0 and (st == 0 or not chksum):
166         os.lseek(f.fileno(), 0, os.SEEK_SET)
168             print "%s: download error %d" % (file, st)
170                 self['incomplete'] = True
# verify the payload against the expected checksum, if one was given
173             fchksum = solv.Chksum(chksum.type)
175                 print "%s: unknown checksum type" % file
177                     self['incomplete'] = True
179             fchksum.add_fd(f.fileno())
180             if fchksum != chksum:
181                 print "%s: checksum mismatch" % file
183                     self['incomplete'] = True
# duplicate the fd: the TemporaryFile object owns the original
186             return solv.xfopen_fd(file, os.dup(f.fileno()))
187         return solv.xfopen_fd(None, os.dup(f.fileno()))
# usecachedrepo: try to load the cached .solv file for this repo (or the
# .solvx for extension `ext`).  The last 32 bytes of the cache file are the
# cookie; a mismatch with the current cookie invalidates the cache.
189     def usecachedrepo(self, ext, mark=False):
191             cookie = self['cookie']
193             cookie = self['extcookie']
195             repopath = self.cachepath(ext)
196             f = open(repopath, 'r')
197             f.seek(-32, os.SEEK_END)
199             if len(fcookie) != 32:
201             if cookie and fcookie != cookie:
# main (non-ext) caches additionally store the extcookie just before
# the cookie, i.e. in the second-to-last 32-byte slot
203             if self.type != 'system' and not ext:
204                 f.seek(-32 * 2, os.SEEK_END)
205                 fextcookie = f.read(32)
206                 if len(fextcookie) != 32:
211                 flags = Repo.REPO_USE_LOADING|Repo.REPO_EXTEND_SOLVABLES
213                     flags |= Repo.REPO_LOCALPOOL
214             if not self.handle.add_solv(f, flags):
216             if self.type != 'system' and not ext:
217                 self['cookie'] = fcookie
218                 self['extcookie'] = fextcookie
220                 # no futimes in python?
# touch the cache so metadata_expire is measured from last use
222                     os.utime(repopath, None)
# genextcookie: derive the extension cookie from the main cookie plus the
# cache file's stat data, mirroring what the C tools do.
229     def genextcookie(self, f):
230         chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256)
231         chksum.add(self['cookie'])
233             stat = os.fstat(f.fileno())
234             chksum.add(str(stat[ST_DEV]))
235             chksum.add(str(stat[ST_INO]))
236             chksum.add(str(stat[ST_SIZE]))
237             chksum.add(str(stat[ST_MTIME]))
238         extcookie = chksum.raw()
239         # compatibility to c code
240         if ord(extcookie[0]) == 0:
241             extcookie[0] = chr(1)
242         self['extcookie'] = extcookie
# writecachedrepo: write the repo (or one extension repodata via `info`)
# to a temp file in /var/cache/solv, append the cookie(s), atomically
# rename into place, and -- when possible -- reload from the written file
# so libsolv can page data from disk and save memory.
244     def writecachedrepo(self, ext, info=None):
246         if not os.path.isdir("/var/cache/solv"):
247             os.mkdir("/var/cache/solv", 0755)
# mkstemp + rename gives an atomic cache update
248         (fd, tmpname) = tempfile.mkstemp(prefix='.newsolv-', dir='/var/cache/solv')
250             f = os.fdopen(fd, 'w+')
255             else: # rewrite_repos case
256                 self.handle.write_first_repodata(f)
257             if self.type != 'system' and not ext:
258                 if 'extcookie' not in self:
260                 f.write(self['extcookie'])
262                 f.write(self['cookie'])
264                 f.write(self['extcookie'])
266                 if self.handle.iscontiguous():
267                     # switch to saved repo to activate paging and save memory
268                     nf = solv.xfopen(tmpname)
272                         if not self.handle.add_solv(nf, Repo.SOLV_ADD_NO_STUBS):
273                             sys.exit("internal error, cannot reload solv file")
276                         # need to extend to repo boundaries, as this is how
277                         # info.write() has written the data
278                         info.extend_to_repo()
279                         # LOCALPOOL does not help as pool already contains all ids
280                         info.add_solv(nf, Repo.REPO_EXTEND_SOLVABLES)
282             os.rename(tmpname, self.cachepath(ext))
# updateaddedprovides: record the pool's computed file-provides ids in the
# repo's metadata and rewrite the cache, so the next run does not have to
# rescan file lists.  Skipped for incomplete/unloaded/empty repos.
287     def updateaddedprovides(self, addedprovides):
288         if 'incomplete' in self:
290         if 'handle' not in self:
292         if self.handle.isempty():
294         # make sure there's just one real repodata with extensions
295         repodata = self.handle.first_repodata()
298         oldaddedprovides = repodata.lookup_idarray(solv.SOLVID_META, solv.REPOSITORY_ADDEDFILEPROVIDES)
# only rewrite when the stored set does not already cover the new ids
299         if not set(addedprovides) <= set(oldaddedprovides):
300             for id in addedprovides:
301                 repodata.add_idarray(solv.SOLVID_META, solv.REPOSITORY_ADDEDFILEPROVIDES, id)
302             repodata.internalize()
303             self.writecachedrepo(None, repodata)
# rpm-md (yum/dnf style) repository: repomd.xml index plus primary,
# updateinfo, filelists and deltainfo payloads.
# NOTE(review): embedded line numbering shows gaps -- several returns,
# loop headers and else-branches of these methods are not visible here.
305 class repo_repomd(repo_generic):
# Fetch repodata/repomd.xml, compute its cookie, and either reuse the
# cached repo or download+parse primary and updateinfo, then write the
# cache and create stubs for lazily-loaded extensions.
306     def load_if_changed(self):
# trailing comma: stay on the same output line for the status suffix
307         print "rpmmd repo '%s':" % self.name,
309         f = self.download("repodata/repomd.xml", False, None, None)
311             print "no repomd.xml file, skipped"
312             self.handle.free(True)
# cookie is derived from repomd.xml contents -> changes when repo changes
315         self['cookie'] = calc_cookie_fp(f)
316         if self.usecachedrepo(None, True):
320         self.handle.add_repomdxml(f, 0)
323         (filename, filechksum) = self.find('primary')
325             f = self.download(filename, True, filechksum, True)
327                 self.handle.add_rpmmd(f, None, 0)
329             if 'incomplete' in self:
330                 return False # hopeless, need good primary
331         (filename, filechksum) = self.find('updateinfo')
333             f = self.download(filename, True, filechksum, True)
335                 self.handle.add_updateinfoxml(f, 0)
338         if 'incomplete' not in self:
339             self.writecachedrepo(None)
340         # must be called after writing the repo
341         self.handle.create_stubs()
# find: look up a data file (e.g. 'primary') in the parsed repomd.xml
# metadata; returns (location, checksum).
344     def find(self, what):
345         di = self.handle.Dataiterator(solv.SOLVID_META, solv.REPOSITORY_REPOMD_TYPE, what, Dataiterator.SEARCH_STRING)
346         di.prepend_keyname(solv.REPOSITORY_REPOMD)
349             filename = d.pool.lookup_str(solv.SOLVID_POS, solv.REPOSITORY_REPOMD_LOCATION)
350             chksum = d.pool.lookup_checksum(solv.SOLVID_POS, solv.REPOSITORY_REPOMD_CHECKSUM)
# refuse to use files we cannot verify
351             if filename and not chksum:
352                 print "no %s file checksum!" % filename
356         return (filename, chksum)
# add_ext: register one extension (filelists or deltainfo) as an external
# repodata stub so it is only downloaded when actually needed.
359     def add_ext(self, repodata, what, ext):
360         filename, chksum = self.find(what)
# older repos ship deltainfo under the 'prestodelta' name
361         if not filename and what == 'deltainfo':
362             filename, chksum = self.find('prestodelta')
365         handle = repodata.new_handle()
366         repodata.set_poolstr(handle, solv.REPOSITORY_REPOMD_TYPE, what)
367         repodata.set_str(handle, solv.REPOSITORY_REPOMD_LOCATION, filename)
368         repodata.set_checksum(handle, solv.REPOSITORY_REPOMD_CHECKSUM, chksum)
# declare which keys the stub will provide once loaded
370             repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOSITORY_DELTAINFO)
371             repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_FLEXARRAY)
373             repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.SOLVABLE_FILELIST)
374             repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_DIRSTRARRAY)
375         repodata.add_flexarray(solv.SOLVID_META, solv.REPOSITORY_EXTERNAL, handle)
# (enclosing method header not visible here -- presumably add_exts)
378         repodata = self.handle.add_repodata(0)
379         self.add_ext(repodata, 'deltainfo', 'DL')
380         self.add_ext(repodata, 'filelists', 'FL')
381         repodata.internalize()
# load_ext: callback invoked by libsolv when a stub's data is first
# needed; maps the repomd type to the cache extension ('FL'/'DL'),
# tries the cache, otherwise downloads and parses the file.
383     def load_ext(self, repodata):
384         repomdtype = repodata.lookup_str(solv.SOLVID_META, solv.REPOSITORY_REPOMD_TYPE)
385         if repomdtype == 'filelists':
387         elif repomdtype == 'deltainfo':
391         sys.stdout.write("[%s:%s: " % (self.name, ext))
392         if self.usecachedrepo(ext):
393             sys.stdout.write("cached]\n")
396         sys.stdout.write("fetching]\n")
398         filename = repodata.lookup_str(solv.SOLVID_META, solv.REPOSITORY_REPOMD_LOCATION)
399         filechksum = repodata.lookup_checksum(solv.SOLVID_META, solv.REPOSITORY_REPOMD_CHECKSUM)
400         f = self.download(filename, True, filechksum)
404             self.handle.add_rpmmd(f, 'FL', Repo.REPO_USE_LOADING|Repo.REPO_EXTEND_SOLVABLES)
406             self.handle.add_deltainfoxml(f, Repo.REPO_USE_LOADING)
408         self.writecachedrepo(ext, repodata)
# susetags (yast2 style) repository: 'content' index file plus
# packages[.gz] / packages.<lang>[.gz] data files under descrdir.
# NOTE(review): this chunk is missing lines throughout (numbering gaps);
# comments below describe only what the visible statements establish.
411 class repo_susetags(repo_generic):
# Fetch the 'content' file, compute its cookie, and either reuse the
# cached repo or download+parse packages and packages.en, then write
# the cache and create extension stubs.
412     def load_if_changed(self):
413         print "susetags repo '%s':" % self.name,
415         f = self.download("content", False, None, None)
417             print "no content file, skipped"
418             self.handle.free(True)
421         self['cookie'] = calc_cookie_fp(f)
422         if self.usecachedrepo(None, True):
426         self.handle.add_content(f, 0)
429         defvendorid = self.handle.lookup_id(solv.SOLVID_META, solv.SUSETAGS_DEFAULTVENDOR)
430         descrdir = self.handle.lookup_str(solv.SOLVID_META, solv.SUSETAGS_DESCRDIR)
# fall back to the conventional location when content has no DESCRDIR
432             descrdir = "suse/setup/descr"
# prefer the compressed packages file, fall back to uncompressed
433         (filename, filechksum) = self.find('packages.gz')
435             (filename, filechksum) = self.find('packages')
437             f = self.download(descrdir + '/' + filename, True, filechksum, True)
439                 self.handle.add_susetags(f, defvendorid, None, Repo.REPO_NO_INTERNALIZE|Repo.SUSETAGS_RECORD_SHARES)
# English descriptions live in a separate packages.en file
441                 (filename, filechksum) = self.find('packages.en.gz')
443                     (filename, filechksum) = self.find('packages.en')
445                     f = self.download(descrdir + '/' + filename, True, filechksum, True)
447                         self.handle.add_susetags(f, defvendorid, None, Repo.REPO_NO_INTERNALIZE|Repo.REPO_REUSE_REPODATA|Repo.REPO_EXTEND_SOLVABLES)
449                 self.handle.internalize()
451         if 'incomplete' not in self:
452             self.writecachedrepo(None)
453         # must be called after writing the repo
454         self.handle.create_stubs()
# find: look up a data file by name in the parsed 'content' metadata;
# returns (name, checksum).
457     def find(self, what):
458         di = self.handle.Dataiterator(solv.SOLVID_META, solv.SUSETAGS_FILE_NAME, what, Dataiterator.SEARCH_STRING)
459         di.prepend_keyname(solv.SUSETAGS_FILE)
462             chksum = d.pool.lookup_checksum(solv.SOLVID_POS, solv.SUSETAGS_FILE_CHECKSUM)
463             return (what, chksum)
# add_ext: register one packages.<ext> file as an external repodata stub,
# declaring the keys (disk usage, filelist, per-language strings) that
# loading it will provide.
466     def add_ext(self, repodata, what, ext):
467         (filename, chksum) = self.find(what)
470         handle = repodata.new_handle()
471         repodata.set_str(handle, solv.SUSETAGS_FILE_NAME, filename)
473             repodata.set_checksum(handle, solv.SUSETAGS_FILE_CHECKSUM, chksum)
475             repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.SOLVABLE_DISKUSAGE)
476             repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_DIRNUMNUMARRAY)
478             repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.SOLVABLE_FILELIST)
479             repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_DIRSTRARRAY)
# language extensions: each provides translated variants of these tags
481             for langtag, langtagtype in [
482                 (solv.SOLVABLE_SUMMARY, solv.REPOKEY_TYPE_STR),
483                 (solv.SOLVABLE_DESCRIPTION, solv.REPOKEY_TYPE_STR),
484                 (solv.SOLVABLE_EULA, solv.REPOKEY_TYPE_STR),
485                 (solv.SOLVABLE_MESSAGEINS, solv.REPOKEY_TYPE_STR),
486                 (solv.SOLVABLE_MESSAGEDEL, solv.REPOKEY_TYPE_STR),
487                 (solv.SOLVABLE_CATEGORY, solv.REPOKEY_TYPE_ID)
489                 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, self.handle.pool.id2langid(langtag, ext, 1))
490                 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, langtagtype)
491         repodata.add_flexarray(solv.SOLVID_META, solv.REPOSITORY_EXTERNAL, handle)
# (enclosing method header not visible here -- presumably add_exts):
# iterate all packages.* files listed in 'content' and register each as
# an extension stub, deriving the short ext name from the file name.
494         repodata = self.handle.add_repodata(0)
495         di = self.handle.Dataiterator(solv.SOLVID_META, solv.SUSETAGS_FILE_NAME, None, 0)
496         di.prepend_keyname(solv.SUSETAGS_FILE)
501             if filename[0:9] != "packages.":
# skip the base packages.gz itself; only extensions are stubs
503             if len(filename) == 11 and filename != "packages.gz":
505             elif filename[11:12] == ".":
511                 self.add_ext(repodata, filename, ext)
512         repodata.internalize()
# load_ext: stub callback -- try cache, else download the packages.<ext>
# file from descrdir and parse it into the repo.
514     def load_ext(self, repodata):
515         filename = repodata.lookup_str(solv.SOLVID_META, solv.SUSETAGS_FILE_NAME)
517         sys.stdout.write("[%s:%s: " % (self.name, ext))
518         if self.usecachedrepo(ext):
519             sys.stdout.write("cached]\n")
522         sys.stdout.write("fetching]\n")
524         defvendorid = self.handle.lookup_id(solv.SOLVID_META, solv.SUSETAGS_DEFAULTVENDOR)
525         descrdir = self.handle.lookup_str(solv.SOLVID_META, solv.SUSETAGS_DESCRDIR)
527             descrdir = "suse/setup/descr"
528         filechksum = repodata.lookup_checksum(solv.SOLVID_META, solv.SUSETAGS_FILE_CHECKSUM)
529         f = self.download(descrdir + '/' + filename, True, filechksum)
532         self.handle.add_susetags(f, defvendorid, ext, Repo.REPO_USE_LOADING|Repo.REPO_EXTEND_SOLVABLES)
534         self.writecachedrepo(ext, repodata)
# Fallback for unrecognized repo types: report and skip.
# NOTE(review): the return (presumably False) is not visible in this chunk.
537 class repo_unknown(repo_generic):
538     def load(self, pool):
539         print "unsupported repo '%s': skipped" % self.name
# The installed-packages repo, backed by the rpm database.  Registers
# itself as pool.installed and caches the converted rpmdb like any
# other repo, keyed on a cookie over /var/lib/rpm/Packages.
# NOTE(review): numbering gaps -- e.g. the early return after the cached
# branch and the final return are not visible here.
542 class repo_system(repo_generic):
543     def load(self, pool):
544         self.handle = pool.add_repo(self.name)
545         self.handle.appdata = self
546         pool.installed = self.handle
547         print "rpm database:",
548         self['cookie'] = calc_cookie_file("/var/lib/rpm/Packages")
549         if self.usecachedrepo(None):
# products first so add_rpmdb can reuse the repodata area
553         self.handle.add_products("/etc/products.d", Repo.REPO_NO_INTERNALIZE)
554         self.handle.add_rpmdb(None, Repo.REPO_REUSE_REPODATA)
555         self.writecachedrepo(None)
# Pseudo-repo holding .rpm files given directly on the command line.
# Packages are added later via handle.add_rpm (see the main script).
558 class repo_cmdline(repo_generic):
559     def load(self, pool):
560         self.handle = pool.add_repo(self.name)
561         self.handle.appdata = self
# Return True when `arch` names an architecture known to the pool.
# NOTE(review): numbering gaps (564->567->570) -- guard clauses (likely
# `if not arch:` / `if not id:` early returns) are not visible here.
564 def validarch(pool, arch):
567     id = pool.str2id(arch, False)
570     return pool.isknownarch(id)
# Narrow each job in `jobs` by attaching a REL_* relation (flags) with
# the given evr string, setting the matching SOLVER_SET* bits so the
# solver treats the user-specified parts as fixed.
# NOTE(review): truncated -- the njobs initialization, the `for j in jobs`
# loop header, `how = j.how`, the else-branch and the final `return njobs`
# are not visible in this chunk.
572 def limitjobs(pool, jobs, flags, evrstr):
574     evr = pool.str2id(evrstr)
577         sel = how & Job.SOLVER_SELECTMASK
578         what = pool.rel2id(j.what, evr, flags)
579         if flags == solv.REL_ARCH:
580             how |= Job.SOLVER_SETARCH
581         elif flags == solv.REL_EQ and sel == Job.SOLVER_SOLVABLE_NAME:
# a '-' means the string carries both version and release -> SETEVR;
# otherwise only the version part is pinned (SETEV)
582             if evrstr.find('-') >= 0:
583                 how |= Job.SOLVER_SETEVR
585                 how |= Job.SOLVER_SETEV
586         njobs.append(pool.Job(how, what))
# Like limitjobs, but first split a trailing ".arch" suffix off evrstr
# and apply it as an additional REL_ARCH restriction.
# NOTE(review): the line stripping the arch suffix from evrstr (presumably
# `evrstr = m.group(1)`) is missing from this view (gap 592 -> 594).
589 def limitjobs_evrarch(pool, jobs, flags, evrstr):
590     m = re.match(r'(.+)\.(.+?)$', evrstr)
591     if m and validarch(pool, m.group(2)):
592         jobs = limitjobs(pool, jobs, solv.REL_ARCH, m.group(2))
594     return limitjobs(pool, jobs, flags, evrstr)
# Build jobs from a file path argument (arg starts with '/'): search the
# file lists (installed repo only for erase-like commands, else the whole
# pool) for packages containing the path; globs are supported.
# NOTE(review): truncated -- the matches list setup, the command check
# selecting installed vs pool search, and the surrounding loop/else
# structure are not visible in this chunk.
596 def mkjobs_filelist(pool, cmd, arg):
# any of [ * ? in the argument switches to glob matching
597     if re.search(r'[[*?]', arg):
598         type = Dataiterator.SEARCH_GLOB
600         type = Dataiterator.SEARCH_STRING
602         di = pool.installed.Dataiterator(0, solv.SOLVABLE_FILELIST, arg, type | Dataiterator.SEARCH_FILES|Dataiterator.SEARCH_COMPLETE_FILELIST)
604         di = pool.Dataiterator(0, solv.SOLVABLE_FILELIST, arg, type | Dataiterator.SEARCH_FILES|Dataiterator.SEARCH_COMPLETE_FILELIST)
608         if s and s.installable():
610             di.skip_solvable() # one match is enough
612         print "[using file list match for '%s']" % arg
# multiple matches -> ONE_OF set; single match -> direct solvable job
614             return [ pool.Job(Job.SOLVER_SOLVABLE_ONE_OF, pool.towhatprovides(matches)) ]
616             return [ pool.Job(Job.SOLVER_SOLVABLE | Job.SOLVER_NOAUTOSET, matches[0]) ]
# Build jobs from a relational argument "name <op> evr": translate the
# operator characters into REL_LT/EQ/GT flags, resolve the name (with a
# possible trailing .arch suffix), and limit the jobs to the evr.
# NOTE(review): truncated -- flags initialization, the `if jobs:` guards
# and the fallthrough return are not visible in this chunk.
619 def mkjobs_rel(pool, cmd, name, rel, evr):
621     if rel.find('<') >= 0: flags |= solv.REL_LT
622     if rel.find('=') >= 0: flags |= solv.REL_EQ
623     if rel.find('>') >= 0: flags |= solv.REL_GT
624     jobs = depglob(pool, name, True, True)
626         return limitjobs(pool, jobs, flags, evr)
# retry with a trailing ".arch" suffix split off the name
627     m = re.match(r'(.+)\.(.+?)$', name)
628     if m and validarch(pool, m.group(2)):
629         jobs = depglob(pool, m.group(1), True, True)
631             jobs = limitjobs(pool, jobs, solv.REL_ARCH, m.group(2))
632             return limitjobs(pool, jobs, flags, evr)
# Build jobs from a plain package argument, trying progressively more
# specific interpretations: name, name.arch, name-evr, name-evr-release.
# NOTE(review): truncated -- the `if jobs: return jobs` guards between the
# attempts and the final fallthrough are not visible in this chunk.
635 def mkjobs_nevra(pool, cmd, arg):
636     jobs = depglob(pool, arg, True, True)
639     m = re.match(r'(.+)\.(.+?)$', arg)
640     if m and validarch(pool, m.group(2)):
641         jobs = depglob(pool, m.group(1), True, True)
643             return limitjobs(pool, jobs, solv.REL_ARCH, m.group(2))
# name-version form
644     m = re.match(r'(.+)-(.+?)$', arg)
646         jobs = depglob(pool, m.group(1), True, False)
648             return limitjobs_evrarch(pool, jobs, solv.REL_EQ, m.group(2))
# name-version-release form
649     m = re.match(r'(.+)-(.+?-.+?)$', arg)
651         jobs = depglob(pool, m.group(1), True, False)
653             return limitjobs_evrarch(pool, jobs, solv.REL_EQ, m.group(2))
# Dispatch a command-line argument to the right job builder:
# '/path' -> filelist search, 'name<op>evr' -> relational, else nevra.
# NOTE(review): the return after the filelist branch is not visible here
# (gap 658 -> 661).
656 def mkjobs(pool, cmd, arg):
657     if len(arg) and arg[0] == '/':
658         jobs = mkjobs_filelist(pool, cmd, arg)
661     m = re.match(r'(.+?)\s*([<=>]+)\s*(.+?)$', arg)
663         return mkjobs_rel(pool, cmd, m.group(1), m.group(2), m.group(3))
665     return mkjobs_nevra(pool, cmd, arg)
# Resolve a name (optionally with glob characters) into solver jobs.
# globname/globdep select whether package names and/or provided
# capabilities are searched.  Exact name match wins over capability
# match; glob matching is attempted only when the literal lookup fails.
# NOTE(review): numbering gaps -- loop bodies, the idmatches dict setup
# and some guards are not visible in this chunk.
667 def depglob(pool, name, globname, globdep):
668     id = pool.str2id(name, False)
# literal id exists: prefer an exact package-name job if some provider
# actually has this name, else fall back to a capability job
671         for s in pool.whatprovides(id):
672             if globname and s.nameid == id:
673                 return [ pool.Job(Job.SOLVER_SOLVABLE_NAME, id) ]
676         if globname and globdep:
677             print "[using capability match for '%s']" % name
678         return [ pool.Job(Job.SOLVER_SOLVABLE_PROVIDES, id) ]
# no glob characters -> nothing more to try
679     if not re.search(r'[[*?]', name):
# glob over package names first
684         for d in pool.Dataiterator(0, solv.SOLVABLE_NAME, name, Dataiterator.SEARCH_GLOB):
687                 idmatches[s.nameid] = True
689             return [ pool.Job(Job.SOLVER_SOLVABLE_NAME, id) for id in sorted(idmatches.keys()) ]
691         # try dependency glob
692         idmatches = pool.matchprovidingids(name, Dataiterator.SEARCH_GLOB)
694             print "[using capability match for '%s']" % name
695             return [ pool.Job(Job.SOLVER_SOLVABLE_PROVIDES, id) for id in sorted(idmatches) ]
# Pool load callback: route a stub repodata back to the owning repo
# object (stored in repo.appdata by load()) and let it load the data.
# NOTE(review): gap 700 -> 702 -- the guard for a missing appdata
# (presumably `if repo:` / `return False`) is not visible here.
699 def load_stub(repodata):
700     repo = repodata.repo.appdata
702     return repo.load_ext(repodata)
# --- main script: command-line parsing ---
# NOTE(review): truncated -- the check that args is non-empty before
# printing help, and the cmd/args extraction, are not visible here.
706 parser = OptionParser(usage="usage: solv.py [options] COMMAND")
707 (options, args) = parser.parse_args()
709     parser.print_help(sys.stderr)
726 # read all repo configs
# Parse every *.repo file under /etc/zypp/repos.d into a repo_* object,
# defaulting missing attributes, then set up the pool.
# NOTE(review): truncated -- the repos list, the per-section loop over
# cfg, the pool construction and the repos.append calls are not visible.
728 for reposdir in ["/etc/zypp/repos.d"]:
729     if not os.path.isdir(reposdir):
731     for reponame in sorted(glob.glob('%s/*.repo' % reposdir)):
732         cfg = INIConfig(open(reponame))
734             repoattr = {'enabled': 0, 'priority': 99, 'autorefresh': 1, 'type': 'rpm-md', 'metadata_expire': 900}
736                 repoattr[k] = cfg[alias][k]
737             if 'mirrorlist' in repoattr and 'metalink' not in repoattr:
# NOTE(review): str.find() returns -1 (truthy) when absent and 0 (falsy)
# when the substring is at index 0 -- this condition looks buggy; it
# should likely be `.find('/metalink') >= 0`.  Flagging, not fixing,
# since surrounding lines are missing from this chunk.
738                 if repoattr['mirrorlist'].find('/metalink'):
739                     repoattr['metalink'] = repoattr['mirrorlist']
740                     del repoattr['mirrorlist']
741             if repoattr['type'] == 'rpm-md':
742                 repo = repo_repomd(alias, 'repomd', repoattr)
743             elif repoattr['type'] == 'yast2':
744                 repo = repo_susetags(alias, 'susetags', repoattr)
746                 repo = repo_unknown(alias, 'unknown', repoattr)
# match the pool's arch to the running machine
750 pool.setarch(os.uname()[4])
751 pool.set_loadcallback(load_stub)
753 # now load all enabled repos into the pool
754 sysrepo = repo_system('@System', 'system')
# NOTE(review): the sysrepo.load(pool) call and the `for repo in repos`
# loop header are not visible (gaps 754 -> 757).
757     if int(repo['enabled']):
# 'search' command: case-insensitive substring search over package names
762     di = pool.Dataiterator(0, solv.SOLVABLE_NAME, args[0], Dataiterator.SEARCH_SUBSTRING|Dataiterator.SEARCH_NOCASE)
764         matches[d.solvid] = True
765     for solvid in sorted(matches.keys()):
766         print " - %s [%s]: %s" % (pool.solvid2str(solvid), pool.solvables[solvid].repo.name, pool.lookup_str(solvid, solv.SOLVABLE_SUMMARY))
# Local .rpm arguments: put them in a '@commandline' pseudo-repo so they
# can be resolved like any other package.  Then compute file provides
# and translate the remaining arguments into solver jobs.
# NOTE(review): truncated -- cmdlinerepo initialization to None, the
# argument loop headers, and the jobs list setup are not visible.
770 if cmd == 'list' or cmd == 'info' or cmd == 'install':
772         if arg.endswith(".rpm") and os.access(arg, os.R_OK):
774                 cmdlinerepo = repo_cmdline('@commandline', 'cmdline')
775                 cmdlinerepo.load(pool)
776                 cmdlinerepo['packages'] = {}
777             cmdlinerepo['packages'][arg] = cmdlinerepo.handle.add_rpm(arg, Repo.REPO_REUSE_REPODATA|Repo.REPO_NO_INTERNALIZE)
779     cmdlinerepo.handle.internalize()
# distribute computed file-provides ids into every repo's cache
781 addedprovides = pool.addfileprovides_ids()
783     sysrepo.updateaddedprovides(addedprovides)
785         repo.updateaddedprovides(addedprovides)
787 pool.createwhatprovides()
789 # convert arguments into jobs
# command-line rpms are addressed directly by their solvable id
792     if cmdlinerepo and arg in cmdlinerepo['packages']:
793         jobs.append(pool.Job(Job.SOLVER_SOLVABLE, cmdlinerepo['packages'][arg]))
795         njobs = mkjobs(pool, cmd, arg)
797             print "nothing matches '%s'" % arg
# 'list'/'info' output: expand each job into its solvables and print
# either full details (info) or a short one-liner (list).
# NOTE(review): truncated -- the job loop header, the info/list branch
# selection and the sys.exit calls are not visible in this chunk.
801 if cmd == 'list' or cmd == 'info':
803         print "no package matched."
806         for s in job.solvables():
809                 print "Repo: %s" % s.repo
810                 print "Summary: %s" % s.lookup_str(solv.SOLVABLE_SUMMARY)
811                 str = s.lookup_str(solv.SOLVABLE_URL)
813                     print "Url: %s" % str
814                 str = s.lookup_str(solv.SOLVABLE_LICENSE)
816                     print "License: %s" % str
817                 print "Description:\n%s" % s.lookup_str(solv.SOLVABLE_DESCRIPTION)
820                 print " - %s [%s]" % (s, s.repo)
821                 print "   %s" % s.lookup_str(solv.SOLVABLE_SUMMARY)
# Action commands: OR the appropriate SOLVER_* action bit into each job.
# up/dup/verify with no arguments operate on all solvables.
# NOTE(review): truncated -- the `if not jobs` guard around line 827, the
# per-job loop header and the cmd branches for erase/dup are partially
# missing from this view.
824 if cmd == 'install' or cmd == 'erase' or cmd == 'up' or cmd == 'dup' or cmd == 'verify':
826         if cmd == 'up' or cmd == 'verify' or cmd == 'dup':
827             jobs = [ pool.Job(Job.SOLVER_SOLVABLE_ALL, 0) ]
829             print "no package matched."
833             # up magic: use install instead of update if no installed package matches
834             if job.how == Job.SOLVER_SOLVABLE_ALL or filter(lambda s: s.isinstalled(), job.solvables()):
835                 job.how |= Job.SOLVER_UPDATE
837                 job.how |= Job.SOLVER_INSTALL
838         elif cmd == 'install':
839             job.how |= Job.SOLVER_INSTALL
841             job.how |= Job.SOLVER_ERASE
843             job.how |= Job.SOLVER_DISTUPGRADE
844         elif cmd == 'verify':
845             job.how |= Job.SOLVER_VERIFY
847     #pool.set_debuglevel(2)
# Run the solver in a loop: present each problem and its solutions,
# let the user pick one interactively, fold the chosen solution's
# elements back into the job list, and re-solve until clean.
# NOTE(review): truncated -- the enclosing `while True:` loop, the
# `if not problems: break` exit, and the erase-specific ALLOW_UNINSTALL
# condition are not visible in this chunk.
850     solver = pool.Solver()
851     solver.set_flag(Solver.SOLVER_FLAG_IGNORE_ALREADY_RECOMMENDED, 1);
852     solver.set_flag(Solver.SOLVER_FLAG_SPLITPROVIDES, 1);
854         solver.set_flag(Solver.SOLVER_FLAG_ALLOW_UNINSTALL, 1);
855     problems = solver.solve(jobs)
858     for problem in problems:
859         print "Problem %d:" % problem.id
860         r = problem.findproblemrule()
862             print ri.problemstr()
863         solutions = problem.solutions()
864         for solution in solutions:
865             print "  Solution %d:" % solution.id
866             elements = solution.elements(True)
867             for element in elements:
868                 print "  - %s" % element.str()
# prompt until the user picks a valid solution number, (s)kip or (q)uit
871         while not (sol == 's' or sol == 'q' or (sol.isdigit() and int(sol) >= 1 and int(sol) <= len(solutions))):
872             sys.stdout.write("Please choose a solution: ")
874             sol = sys.stdin.readline().strip()
876             continue        # skip problem
# apply the chosen solution: replace or append jobs as indicated
879         solution = solutions[int(sol) - 1]
880         for element in solution.elements():
881             newjob = element.Job()
882             if element.type == Solver.SOLVER_SOLUTION_JOB:
883                 jobs[element.jobidx] = newjob
885                 if newjob and newjob not in jobs:
888 # no problems, show transaction
# Classify the computed transaction into erase/install/upgrade/...
# groups, print a human-readable summary, then ask for confirmation
# and compute the total download size of the new packages.
# NOTE(review): truncated -- trans.isempty()/sys.exit, some print
# spacing lines, and the newpkgs loop header are not visible here.
889 trans = solver.transaction()
892     print "Nothing to do."
895 print "Transaction summary:"
897 for cl in trans.classify():
898     if cl.type == Transaction.SOLVER_TRANSACTION_ERASE:
899         print "%d erased packages:" % cl.count
900     elif cl.type == Transaction.SOLVER_TRANSACTION_INSTALL:
901         print "%d installed packages:" % cl.count
902     elif cl.type == Transaction.SOLVER_TRANSACTION_REINSTALLED:
903         print "%d reinstalled packages:" % cl.count
904     elif cl.type == Transaction.SOLVER_TRANSACTION_DOWNGRADED:
905         print "%d downgraded packages:" % cl.count
906     elif cl.type == Transaction.SOLVER_TRANSACTION_CHANGED:
907         print "%d changed packages:" % cl.count
908     elif cl.type == Transaction.SOLVER_TRANSACTION_UPGRADED:
909         print "%d upgraded packages:" % cl.count
910     elif cl.type == Transaction.SOLVER_TRANSACTION_VENDORCHANGE:
911         print "%d vendor changes from '%s' to '%s':" % (cl.count, pool.id2str(cl.fromid), pool.id2str(cl.toid))
912     elif cl.type == Transaction.SOLVER_TRANSACTION_ARCHCHANGE:
913         print "%d arch changes from '%s' to '%s':" % (cl.count, pool.id2str(cl.fromid), pool.id2str(cl.toid))
916     for p in cl.solvables():
# up/downgrades are printed as old -> new pairs
917         if cl.type == Transaction.SOLVER_TRANSACTION_UPGRADED or cl.type == Transaction.SOLVER_TRANSACTION_DOWNGRADED:
918             op = trans.othersolvable(p)
919             print "  - %s -> %s" % (p, op)
923 print "install size change: %d K" % trans.calc_installsizechange()
# ask before touching the system
927     sys.stdout.write("OK to continue (y/n)? ")
929     yn = sys.stdin.readline().strip()
931     if yn == 'n': sys.exit(1)
932 newpkgs = trans.newpackages()
937         downloadsize += p.lookup_num(solv.SOLVABLE_DOWNLOADSIZE)
938     print "Downloading %d packages, %d K" % (len(newpkgs), downloadsize)
# Download loop: for every new package, fetch its rpm (from the command
# line, via applydeltarpm when a matching delta exists, or as a plain
# download) and stash the open file in newpkgsfp keyed by solvable id.
# NOTE(review): truncated -- the `for p in newpkgs:` header, the
# newpkgsfp dict setup, several guards and `continue`s are not visible
# in this chunk.
940     repo = p.repo.appdata
941     location, medianr = p.lookup_location()
# command-line packages are already local files
944     if repo.type == 'commandline':
945         f = solv.xfopen(location)
947             sys.exit("\n%s: %s not found" % location)
# delta rpm path: only worth trying when an rpmdb exists and
# applydeltarpm is installed
950     if not sysrepo.handle.isempty() and os.access('/usr/bin/applydeltarpm', os.X_OK):
952         di = p.repo.Dataiterator(solv.SOLVID_META, solv.DELTA_PACKAGE_NAME, pname, Dataiterator.SEARCH_STRING)
953         di.prepend_keyname(solv.REPOSITORY_DELTAINFO)
# the delta must target exactly this evr/arch
956             if pool.lookup_id(solv.SOLVID_POS, solv.DELTA_PACKAGE_EVR) != p.evrid or pool.lookup_id(solv.SOLVID_POS, solv.DELTA_PACKAGE_ARCH) != p.archid:
958             baseevrid = pool.lookup_id(solv.SOLVID_POS, solv.DELTA_BASE_EVR)
# find an installed package the delta can be applied against
960             for installedp in pool.whatprovides(p.nameid):
961                 if installedp.isinstalled() and installedp.nameid == p.nameid and installedp.archid == p.archid and installedp.evrid == baseevrid:
962                     candidate = installedp
# let applydeltarpm -s check the sequence applies cleanly first
965             seq = pool.lookup_str(solv.SOLVID_POS, solv.DELTA_SEQ_NAME) + '-' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_SEQ_EVR) + '-' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_SEQ_NUM)
966             st = subprocess.call(['/usr/bin/applydeltarpm', '-a', p.arch, '-c', '-s', seq])
969             chksum = pool.lookup_checksum(solv.SOLVID_POS, solv.DELTA_CHECKSUM)
972             dloc = pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_DIR) + '/' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_NAME) + '-' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_EVR) + '.' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_SUFFIX)
973             f = repo.download(dloc, False, chksum)
# reconstruct the full rpm from the delta into a temp fd
976             nf = tempfile.TemporaryFile()
977             nf = os.dup(nf.fileno())
978             st = subprocess.call(['/usr/bin/applydeltarpm', '-a', p.arch, "/dev/fd/%d" % solv.xfileno(f), "/dev/fd/%d" % nf])
980             os.lseek(nf, 0, os.SEEK_SET)
981             newpkgsfp[p.id] = solv.xfopen_fd("", nf)
983     if p.id in newpkgsfp:
984         sys.stdout.write("d")
# plain download path; susetags locations are relative to DATADIR
988     if repo.type == 'susetags':
989         datadir = repo.handle.lookup_str(solv.SOLVID_META, solv.SUSETAGS_DATADIR)
992         location = datadir + '/' + location
993     chksum = p.lookup_checksum(solv.SOLVABLE_CHECKSUM)
994     f = repo.download(location, False, chksum)
996         sys.exit("\n%s: %s not found in repository" % (repo.name, location))
998     sys.stdout.write(".")
# Commit phase: feed the solver transaction into an rpm TransactionSet,
# add erases by rpmdbid and installs from the downloaded headers, check,
# and run with a callback that hands rpm the open package fds.
# NOTE(review): truncated -- the `if newpkgs:` / step guards, error
# handling after ts.check()/ts.run(), and several print lines are not
# visible in this chunk.
1001 print "Committing transaction:"
# signature checking is skipped; packages were checksum-verified above
1003 ts = rpm.TransactionSet('/')
1004 ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
1005 erasenamehelper = {}
1006 for p in trans.steps():
1007     type = trans.steptype(p, Transaction.SOLVER_TRANSACTION_RPM_ONLY)
1008     if type == Transaction.SOLVER_TRANSACTION_ERASE:
1009         rpmdbid = p.lookup_num(solv.RPM_RPMDBID)
# remember name -> solvable for the UNINST callback (see below)
1010         erasenamehelper[p.name] = p
1012             sys.exit("\ninternal error: installed package %s has no rpmdbid\n" % p)
1013         ts.addErase(rpmdbid)
1014     elif type == Transaction.SOLVER_TRANSACTION_INSTALL:
1016         h = ts.hdrFromFdno(solv.xfileno(f))
# rewind so the callback can re-read the package from the start
1017         os.lseek(solv.xfileno(f), 0, os.SEEK_SET)
1018         ts.addInstall(h, p, 'u')
1019     elif type == Transaction.SOLVER_TRANSACTION_MULTIINSTALL:
1021         h = ts.hdrFromFdno(solv.xfileno(f))
1022         os.lseek(solv.xfileno(f), 0, os.SEEK_SET)
# 'i' = install alongside, for multiversion packages
1023         ts.addInstall(h, p, 'i')
1024 checkproblems = ts.check()
# rpm progress callback: hands back the open fd for each install and
# maps uninstall names back to solvables via erasenamehelper
1029 def runCallback(reason, amount, total, p, d):
1030     if reason == rpm.RPMCALLBACK_INST_OPEN_FILE:
1031         return solv.xfileno(newpkgsfp[p.id])
1032     if reason == rpm.RPMCALLBACK_INST_START:
1034     if reason == rpm.RPMCALLBACK_UNINST_START:
1035         # argh, p is just the name of the package
1036         if p in erasenamehelper:
1037             p = erasenamehelper[p]
1039 runproblems = ts.run(runCallback, '')
1045     print "unknown command", cmd