4 # Copyright (c) 2011, Novell Inc.
6 # This program is licensed under the BSD license, read LICENSE.BSD
7 # for further information
10 # pysolv a little software installer demoing the sat solver library/bindings
13 # - understands globs for package names / dependencies
14 # - understands .arch suffix
15 # - repository data caching
16 # - on demand loading of secondary repository data
17 # - checksum verification
19 # - installation of commandline packages
21 # things not yet ported:
24 # - fastestmirror implementation
26 # things available in the library but missing from pysolv:
27 # - vendor policy loading
28 # - soft locks file handling
29 # - multi version handling
42 from solv import Pool, Repo, Dataiterator, Job, Solver, Transaction
43 from iniparse import INIConfig
44 from optparse import OptionParser
47 #gc.set_debug(gc.DEBUG_LEAK)
def calc_cookie_file(filename):
    """Return the raw SHA-256 cache-validation cookie for *filename*.

    The digest is folded over the file's stat data, so the cookie changes
    whenever the file changes on disk.
    """
    chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256)
    chksum.add("1.1")     # NOTE(review): cookie format version salt (upstream compat) — confirm
    chksum.add_stat(filename)
    return chksum.raw()   # fix: checksum was computed but never returned
def calc_cookie_fp(fp):
    """Return the raw SHA-256 cookie computed over the content of open file *fp*."""
    chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256)
    chksum.add_fp(fp)     # fix: the file content was never folded into the digest
    return chksum.raw()   # fix: result was never returned
60 class generic_repo(dict):
61 def __init__(self, attribs):
65 def cachepath(self, ext = None):
66 path = re.sub(r'^\.', '_', self['alias'])
68 path += "_" + ext + ".solvx"
71 return "/var/cache/solv/" + re.sub(r'[/]', '_', path)
74 self['handle'] = pool.add_repo(repo['alias'])
75 self['handle'].appdata = repo
76 self['handle'].priority = 99 - repo['priority']
77 if self['autorefresh']:
81 st = os.stat(self.cachepath())
82 if time.time() - st[ST_MTIME] < self['metadata_expire']:
87 if not dorefresh and self.usecachedrepo(None):
88 print "repo: '%s': cached" % self['alias']
90 return self.load_if_changed()
92 def load_ext(repodata):
95 def setfromurls(self, urls):
99 print "[using mirror %s]" % re.sub(r'^(.*?/...*?)/.*$', r'\1', url)
100 self['baseurl'] = url
102 def setfrommetalink(self, metalink):
103 nf = self.download(metalink, False, None)
106 f = os.fdopen(os.dup(solv.xfileno(nf)), 'r')
110 for l in f.readlines():
112 m = re.match(r'^<hash type="sha256">([0-9a-fA-F]{64})</hash>', l)
114 chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256, m.group(1))
115 m = re.match(r'^<url.*>(https?://.+)repodata/repomd.xml</url>', l)
117 urls.append(m.group(1))
119 chksum = None # in case the metalink is about a different file
121 self.setfromurls(urls)
124 def setfrommirrorlist(self, mirrorlist):
125 nf = self.download(mirrorlist, False, None)
128 f = os.fdopen(os.dup(solv.xfileno(nf)), 'r')
131 for l in f.readline():
133 if l[0:6] == 'http://' or l[0:7] == 'https://':
135 self.setfromurls(urls)
138 def download(self, file, uncompress, chksum, markincomplete=False):
140 if 'baseurl' not in self:
141 if 'metalink' in self:
142 if file != self['metalink']:
143 metalinkchksum = self.setfrommetalink(self['metalink'])
144 if file == 'repodata/repomd.xml' and metalinkchksum and not chksum:
145 chksum = metalinkchksum
148 elif 'mirrorlist' in self:
149 if file != self['mirrorlist']:
150 self.setfrommirrorlist(self['mirrorlist'])
154 if 'baseurl' not in self:
155 print "%s: no baseurl" % self['alias']
157 url = re.sub(r'/$', '', self['baseurl']) + '/' + file
158 f = tempfile.TemporaryFile()
159 st = subprocess.call(['curl', '-f', '-s', '-L', url], stdout=f.fileno())
160 if os.lseek(f.fileno(), 0, os.SEEK_CUR) == 0 and (st == 0 or not chksum):
162 os.lseek(f.fileno(), 0, os.SEEK_SET)
164 print "%s: download error %d" % (file, st)
166 self['incomplete'] = True
169 fchksum = solv.Chksum(chksum.type)
171 print "%s: unknown checksum type" % file
173 self['incomplete'] = True
175 fchksum.add_fd(f.fileno())
176 if not fchksum.matches(chksum):
177 print "%s: checksum mismatch" % file
179 self['incomplete'] = True
182 return solv.xfopen_fd(file, os.dup(f.fileno()))
183 return solv.xfopen_fd("", os.dup(f.fileno()))
185 def usecachedrepo(self, ext, mark=False):
187 cookie = self['cookie']
189 cookie = self['extcookie']
190 handle = self['handle']
192 repopath = self.cachepath(ext)
193 f = open(repopath, 'r')
194 f.seek(-32, os.SEEK_END)
196 if len(fcookie) != 32:
198 if cookie and fcookie != cookie:
200 if self['alias'] != '@System' and not ext:
201 f.seek(-32 * 2, os.SEEK_END)
202 fextcookie = f.read(32)
203 if len(fextcookie) != 32:
208 flags = Repo.REPO_USE_LOADING|Repo.REPO_EXTEND_SOLVABLES
210 flags |= Repo.REPO_LOCALPOOL
211 if not self['handle'].add_solv(f, flags):
213 if self['alias'] != '@System' and not ext:
214 self['cookie'] = fcookie
215 self['extcookie'] = fextcookie
217 # no futimes in python?
219 os.utime(repopath, None)
226 def genextcookie(self, f):
227 chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256)
228 chksum.add(self['cookie'])
230 stat = os.fstat(f.fileno())
231 chksum.add(str(stat[ST_DEV]))
232 chksum.add(str(stat[ST_INO]))
233 chksum.add(str(stat[ST_SIZE]))
234 chksum.add(str(stat[ST_MTIME]))
235 extcookie = chksum.raw()
236 # compatibility to c code
237 if ord(extcookie[0]) == 0:
238 extcookie[0] = chr(1)
239 self['extcookie'] = extcookie
241 def writecachedrepo(self, ext, info=None):
243 if not os.path.isdir("/var/cache/solv"):
244 os.mkdir("/var/cache/solv", 0755)
245 (fd, tmpname) = tempfile.mkstemp(prefix='.newsolv-', dir='/var/cache/solv')
247 f = os.fdopen(fd, 'w+')
249 self['handle'].write(f)
252 else: # rewrite_repos case
253 self['handle'].write_first_repodata(f)
254 if self['alias'] != '@System' and not ext:
255 if 'extcookie' not in self:
257 f.write(self['extcookie'])
259 f.write(self['cookie'])
261 f.write(self['extcookie'])
263 if self['handle'].iscontiguous():
264 # switch to saved repo to activate paging and save memory
265 nf = solv.xfopen(tmpname)
268 self['handle'].empty()
269 if not self['handle'].add_solv(nf, Repo.SOLV_ADD_NO_STUBS):
270 sys.exit("internal error, cannot reload solv file")
273 # need to extend to repo boundaries, as this is how
274 # info.write() has written the data
275 info.extend_to_repo()
276 # LOCALPOOL does not help as pool already contains all ids
277 info.read_solv_flags(nf, Repo.REPO_EXTEND_SOLVABLES)
279 os.rename(tmpname, self.cachepath(ext))
284 def updateaddedprovides(self, addedprovides):
285 if 'incomplete' in self:
287 if 'handle' not in self:
289 if not self['handle'].nsolvables:
291 # make sure there's just one real repodata with extensions
292 repodata = self['handle'].first_repodata()
295 oldaddedprovides = repodata.lookup_idarray(solv.SOLVID_META, solv.REPOSITORY_ADDEDFILEPROVIDES)
296 if not set(addedprovides) <= set(oldaddedprovides):
297 for id in addedprovides:
298 repodata.add_idarray(solv.SOLVID_META, solv.REPOSITORY_ADDEDFILEPROVIDES, id)
299 repodata.internalize()
300 self.writecachedrepo(None, repodata)
302 class repomd_repo(generic_repo):
303 def load_if_changed(self):
304 print "rpmmd repo '%s':" % self['alias'],
306 f = self.download("repodata/repomd.xml", False, None, None)
308 print "no repomd.xml file, skipped"
309 self['handle'].free(True)
312 self['cookie'] = calc_cookie_fp(f)
313 if self.usecachedrepo(None, True):
317 self['handle'].add_repomdxml(f, 0)
320 (filename, filechksum) = self.find('primary')
322 f = self.download(filename, True, filechksum, True)
324 self['handle'].add_rpmmd(f, None, 0)
326 if 'incomplete' in self:
327 return False # hopeless, need good primary
328 (filename, filechksum) = self.find('updateinfo')
330 f = self.download(filename, True, filechksum, True)
332 self['handle'].add_updateinfoxml(f, 0)
335 if 'incomplete' not in self:
336 self.writecachedrepo(None)
337 # must be called after writing the repo
338 self['handle'].create_stubs()
341 def find(self, what):
342 di = self['handle'].dataiterator_new(solv.SOLVID_META, solv.REPOSITORY_REPOMD_TYPE, what, Dataiterator.SEARCH_STRING)
343 di.prepend_keyname(solv.REPOSITORY_REPOMD)
346 filename = d.pool.lookup_str(solv.SOLVID_POS, solv.REPOSITORY_REPOMD_LOCATION)
347 chksum = d.pool.lookup_checksum(solv.SOLVID_POS, solv.REPOSITORY_REPOMD_CHECKSUM)
348 if filename and not chksum:
349 print "no %s file checksum!" % filename
353 return (filename, chksum)
356 def add_ext(self, repodata, what, ext):
357 filename, chksum = self.find(what)
358 if not filename and what == 'deltainfo':
359 filename, chksum = self.find('prestodelta')
362 handle = repodata.new_handle()
363 repodata.set_poolstr(handle, solv.REPOSITORY_REPOMD_TYPE, what)
364 repodata.set_str(handle, solv.REPOSITORY_REPOMD_LOCATION, filename)
365 repodata.set_bin_checksum(handle, solv.REPOSITORY_REPOMD_CHECKSUM, chksum)
366 if what == 'deltainfo':
367 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOSITORY_DELTAINFO)
368 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_FLEXARRAY)
369 elif what == 'filelists':
370 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.SOLVABLE_FILELIST)
371 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_DIRSTRARRAY)
372 repodata.add_flexarray(solv.SOLVID_META, solv.REPOSITORY_EXTERNAL, handle)
375 repodata = self['handle'].add_repodata(0)
376 self.add_ext(repodata, 'deltainfo', 'DL')
377 self.add_ext(repodata, 'filelists', 'FL')
378 repodata.internalize()
380 def load_ext(self, repodata):
381 repomdtype = repodata.lookup_str(solv.SOLVID_META, solv.REPOSITORY_REPOMD_TYPE)
382 if repomdtype == 'filelists':
384 elif repomdtype == 'deltainfo':
388 sys.stdout.write("[%s:%s" % (self['alias'], ext))
389 if self.usecachedrepo(ext):
390 sys.stdout.write(" cached]\n")
393 sys.stdout.write(" fetching]\n")
395 filename = repodata.lookup_str(solv.SOLVID_META, solv.REPOSITORY_REPOMD_LOCATION)
396 filechksum = repodata.lookup_checksum(solv.SOLVID_META, solv.REPOSITORY_REPOMD_CHECKSUM)
397 f = self.download(filename, True, filechksum)
401 self['handle'].add_rpmmd(f, 'FL', Repo.REPO_USE_LOADING|Repo.REPO_EXTEND_SOLVABLES)
403 self['handle'].add_deltainfoxml(f, Repo.REPO_USE_LOADING)
405 self.writecachedrepo(ext, repodata)
408 class susetags_repo(generic_repo):
409 def load_if_changed(self):
410 print "susetags repo '%s':" % self['alias'],
412 f = self.download("content", False, None, None)
414 print "no content file, skipped"
415 self['handle'].free(True)
418 self['cookie'] = calc_cookie_fp(f)
419 if self.usecachedrepo(None, True):
423 self['handle'].add_content(f, 0)
426 defvendorid = self['handle'].lookup_id(solv.SOLVID_META, solv.SUSETAGS_DEFAULTVENDOR)
427 descrdir = self['handle'].lookup_str(solv.SOLVID_META, solv.SUSETAGS_DESCRDIR)
429 descrdir = "suse/setup/descr"
430 (filename, filechksum) = self.find('packages.gz')
432 (filename, filechksum) = self.find('packages')
434 f = self.download(descrdir + '/' + filename, True, filechksum, True)
436 self['handle'].add_susetags(f, defvendorid, None, Repo.REPO_NO_INTERNALIZE|Repo.SUSETAGS_RECORD_SHARES)
438 (filename, filechksum) = self.find('packages.en.gz')
440 (filename, filechksum) = self.find('packages.en')
442 f = self.download(descrdir + '/' + filename, True, filechksum, True)
444 self['handle'].add_susetags(f, defvendorid, None, Repo.REPO_NO_INTERNALIZE|Repo.REPO_REUSE_REPODATA|Repo.REPO_EXTEND_SOLVABLES)
446 self['handle'].internalize()
448 if 'incomplete' not in self:
449 self.writecachedrepo(None)
450 # must be called after writing the repo
451 self['handle'].create_stubs()
454 def find(self, what):
455 di = self['handle'].dataiterator_new(solv.SOLVID_META, solv.SUSETAGS_FILE_NAME, what, Dataiterator.SEARCH_STRING)
456 di.prepend_keyname(solv.SUSETAGS_FILE)
459 chksum = d.pool.lookup_checksum(solv.SOLVID_POS, solv.SUSETAGS_FILE_CHECKSUM)
460 return (what, chksum)
463 def add_ext(self, repodata, what, ext):
464 (filename, chksum) = self.find(what)
467 handle = repodata.new_handle()
468 repodata.set_str(handle, solv.SUSETAGS_FILE_NAME, filename)
470 repodata.set_bin_checksum(handle, solv.SUSETAGS_FILE_CHECKSUM, chksum)
472 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.SOLVABLE_DISKUSAGE)
473 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_DIRNUMNUMARRAY)
475 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.SOLVABLE_FILELIST)
476 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, solv.REPOKEY_TYPE_DIRSTRARRAY)
478 for langtag, langtagtype in [
479 (solv.SOLVABLE_SUMMARY, solv.REPOKEY_TYPE_STR),
480 (solv.SOLVABLE_DESCRIPTION, solv.REPOKEY_TYPE_STR),
481 (solv.SOLVABLE_EULA, solv.REPOKEY_TYPE_STR),
482 (solv.SOLVABLE_MESSAGEINS, solv.REPOKEY_TYPE_STR),
483 (solv.SOLVABLE_MESSAGEDEL, solv.REPOKEY_TYPE_STR),
484 (solv.SOLVABLE_CATEGORY, solv.REPOKEY_TYPE_ID)
486 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, self['handle'].pool.id2langid(langtag, ext, 1))
487 repodata.add_idarray(handle, solv.REPOSITORY_KEYS, langtagtype)
488 repodata.add_flexarray(solv.SOLVID_META, solv.REPOSITORY_EXTERNAL, handle)
491 repodata = self['handle'].add_repodata(0)
492 di = self['handle'].dataiterator_new(solv.SOLVID_META, solv.SUSETAGS_FILE_NAME, None, 0)
493 di.prepend_keyname(solv.SUSETAGS_FILE)
495 filename = d.match_str()
498 if filename[0:9] != "packages.":
500 if len(filename) == 11 and filename != "packages.gz":
502 elif filename[11:12] == ".":
508 self.add_ext(repodata, filename, ext)
509 repodata.internalize()
511 def load_ext(self, repodata):
512 filename = repodata.lookup_str(solv.SOLVID_META, solv.SUSETAGS_FILE_NAME)
514 sys.stdout.write("[%s:%s" % (self['alias'], ext))
515 if self.usecachedrepo(ext):
516 sys.stdout.write(" cached]\n")
519 sys.stdout.write(" fetching]\n")
521 defvendorid = self['handle'].lookup_id(solv.SOLVID_META, solv.SUSETAGS_DEFAULTVENDOR)
522 descrdir = self['handle'].lookup_str(solv.SOLVID_META, solv.SUSETAGS_DESCRDIR)
524 descrdir = "suse/setup/descr"
525 filechksum = repodata.lookup_checksum(solv.SOLVID_META, solv.SUSETAGS_FILE_CHECKSUM)
526 f = self.download(descrdir + '/' + filename, True, filechksum)
529 self['handle'].add_susetags(f, defvendorid, None, Repo.REPO_USE_LOADING|Repo.REPO_EXTEND_SOLVABLES)
531 self.writecachedrepo(ext, repodata)
534 class unknown_repo(generic_repo):
535 def load(self, pool):
536 print "unsupported repo '%s': skipped" % self['alias']
539 class system_repo(generic_repo):
540 def load(self, pool):
541 self['handle'] = pool.add_repo(self['alias'])
542 self['handle'].appdata = self
543 pool.installed = self['handle']
544 print "rpm database:",
545 self['cookie'] = calc_cookie_file("/var/lib/rpm/Packages")
546 if self.usecachedrepo(None):
550 self['handle'].add_products("/etc/products.d", Repo.REPO_NO_INTERNALIZE)
551 self['handle'].add_rpmdb(None)
552 self.writecachedrepo(None)
class cmdline_repo(generic_repo):
    """Pseudo repo holding rpm files given directly on the command line."""
    def load(self, pool):
        # Register an (initially empty) repo in the pool; packages are
        # added later via add_rpm() by the command-line handling code.
        handle = pool.add_repo(self['alias'])
        handle.appdata = self
        self['handle'] = handle
def validarch(pool, arch):
    """Return True if *arch* names an architecture known to *pool*."""
    if not arch:
        return False
    # pass create=False so junk input does not pollute the pool's id table
    archid = pool.str2id(arch, False)
    if not archid:
        return False   # robustness: string not interned -> cannot be a known arch
    return pool.isknownarch(archid)
569 def limitjobs(pool, jobs, flags, evr):
573 sel = how & Job.SOLVER_SELECTMASK
574 what = pool.rel2id(j.what, evr, flags)
575 if flags == solv.REL_ARCH:
576 how |= Job.SOLVER_SETARCH
577 elif flags == solv.REL_EQ and sel == Job.SOLVER_SOLVABLE_NAME:
578 if pool.id2str(evr).find('-') >= 0:
579 how |= Job.SOLVER_SETEVR
581 how |= Job.SOLVER_SETEV
582 njobs.append(pool.Job(how, what))
def limitjobs_arch(pool, jobs, flags, evr):
    """Like limitjobs(), but honours a trailing '.<arch>' suffix on *evr*."""
    match = re.match(r'(.+)\.(.+?)$', evr)
    if not match or not validarch(pool, match.group(2)):
        # no usable arch suffix: limit by the evr string as-is
        return limitjobs(pool, jobs, flags, pool.str2id(evr))
    base = match.group(1)
    arch = match.group(2)
    # first narrow by architecture, then by the remaining evr part
    archjobs = limitjobs(pool, jobs, solv.REL_ARCH, pool.str2id(arch))
    return limitjobs(pool, archjobs, flags, pool.str2id(base))
593 def mkjobs_filelist(pool, cmd, arg):
594 if re.search(r'[[*?]', arg):
595 type = Dataiterator.SEARCH_GLOB
597 type = Dataiterator.SEARCH_STRING
599 di = pool.installed.dataiterator_new(0, solv.SOLVABLE_FILELIST, arg, type | Dataiterator.SEARCH_FILES|Dataiterator.SEARCH_COMPLETE_FILELIST)
601 di = pool.dataiterator_new(0, solv.SOLVABLE_FILELIST, arg, type | Dataiterator.SEARCH_FILES|Dataiterator.SEARCH_COMPLETE_FILELIST)
605 if s and s.installable():
607 di.skip_solvable() # one match is enough
609 print "[using file list match for '%s']" % arg
611 return [ pool.Job(Job.SOLVER_SOLVABLE_ONE_OF, pool.towhatprovides(matches)) ]
613 return [ pool.Job(Job.SOLVER_SOLVABLE | Job.SOLVER_NOAUTOSET, matches[0]) ]
def mkjobs_rel(pool, cmd, name, rel, evr):
    """Create jobs for relational arguments like 'foo >= 1.2'.

    *rel* is the operator string; any combination of '<', '=', '>' maps to
    the corresponding solv.REL_* flags.  Returns [] when nothing matches.
    """
    flags = 0   # fix: flags was or-ed into (|=) without being initialised
    if rel.find('<') >= 0: flags |= solv.REL_LT
    if rel.find('=') >= 0: flags |= solv.REL_EQ
    if rel.find('>') >= 0: flags |= solv.REL_GT
    jobs = depglob(pool, name, True, True)
    if jobs:
        return limitjobs(pool, jobs, flags, pool.str2id(evr))
    # fall back to interpreting the name part as 'name.arch'
    m = re.match(r'(.+)\.(.+?)$', name)
    if m and validarch(pool, m.group(2)):
        jobs = depglob(pool, m.group(1), True, True)
        if jobs:
            jobs = limitjobs(pool, jobs, solv.REL_ARCH, pool.str2id(m.group(2)))
            return limitjobs(pool, jobs, flags, pool.str2id(evr))
    return []
632 def mkjobs_nevra(pool, cmd, arg):
633 jobs = depglob(pool, arg, True, True)
636 m = re.match(r'(.+)\.(.+?)$', arg)
637 if m and validarch(pool, m.group(2)):
638 jobs = depglob(pool, m.group(1), True, True)
640 return limitjobs(pool, jobs, solv.REL_ARCH, pool.str2id(m.group(2)))
641 m = re.match(r'(.+)-(.+?)$', arg)
643 jobs = depglob(pool, m.group(1), True, False)
645 return limitjobs_arch(pool, jobs, solv.REL_EQ, m.group(2))
646 m = re.match(r'(.+)-(.+?-.+?)$', arg)
648 jobs = depglob(pool, m.group(1), True, False)
650 return limitjobs_arch(pool, jobs, solv.REL_EQ, m.group(2))
def mkjobs(pool, cmd, arg):
    """Turn one command-line argument into a list of solver jobs.

    Dispatch order: absolute path -> file-list match, 'name <op> evr' ->
    relational match, otherwise plain name/nevra matching.
    """
    if len(arg) and arg[0] == '/':
        jobs = mkjobs_filelist(pool, cmd, arg)
        if jobs:
            return jobs   # fix: the filelist result was computed but discarded
    m = re.match(r'(.+?)\s*([<=>]+)\s*(.+?)$', arg)
    if m:
        return mkjobs_rel(pool, cmd, m.group(1), m.group(2), m.group(3))
    return mkjobs_nevra(pool, cmd, arg)
664 def depglob(pool, name, globname, globdep):
665 id = pool.str2id(name, False)
668 for s in pool.providers(id):
669 if globname and s.nameid == id:
670 return [ pool.Job(Job.SOLVER_SOLVABLE_NAME, id) ]
673 if globname and globdep:
674 print "[using capability match for '%s']" % name
675 return [ pool.Job(Job.SOLVER_SOLVABLE_PROVIDES, id) ]
676 if not re.search(r'[[*?]', name):
681 for d in pool.dataiterator_new(0, solv.SOLVABLE_NAME, name, Dataiterator.SEARCH_GLOB):
684 idmatches[s.nameid] = True
686 return [ pool.Job(Job.SOLVER_SOLVABLE_NAME, id) for id in sorted(idmatches.keys()) ]
688 # try dependency glob
689 idmatches = pool.matchprovidingids(name, Dataiterator.SEARCH_GLOB)
691 print "[using capability match for '%s']" % name
692 return [ pool.Job(Job.SOLVER_SOLVABLE_PROVIDES, id) for id in sorted(idmatches) ]
def load_stub(repodata):
    """Pool load callback: forward stub loading to the owning repo object."""
    repo = repodata.repo.appdata
    if repo:
        return repo.load_ext(repodata)
    return False   # robustness: repos without appdata cannot load stubs
703 parser = OptionParser(usage="usage: solv.py [options] COMMAND")
704 (options, args) = parser.parse_args()
706 parser.print_help(sys.stderr)
724 pool.setarch(os.uname()[4])
725 pool.set_loadcallback(load_stub)
727 # read all repo configs
729 for reposdir in ["/etc/zypp/repos.d"]:
730 if not os.path.isdir(reposdir):
732 for reponame in sorted(glob.glob('%s/*.repo' % reposdir)):
733 cfg = INIConfig(open(reponame))
735 repoattr = {'alias': alias, 'enabled': 0, 'priority': 99, 'autorefresh': 1, 'type': 'rpm-md', 'metadata_expire': 900}
737 repoattr[k] = cfg[alias][k]
738 if 'mirrorlist' in repoattr and 'metalink' not in repoattr:
739 if repoattr['mirrorlist'].find('/metalink'):
740 repoattr['metalink'] = repoattr['mirrorlist']
741 del repoattr['mirrorlist']
742 if repoattr['type'] == 'rpm-md':
743 repo = repomd_repo(repoattr)
744 elif repoattr['type'] == 'yast2':
745 repo = susetags_repo(repoattr)
747 repo = unknown_repo(repoattr)
750 # now load all enabled repos into the pool
751 sysrepo = system_repo({ 'alias': '@System', 'type': 'system' })
754 if int(repo['enabled']):
759 di = pool.dataiterator_new(0, solv.SOLVABLE_NAME, args[0], Dataiterator.SEARCH_SUBSTRING|Dataiterator.SEARCH_NOCASE)
761 matches[d.solvid] = True
762 for solvid in sorted(matches.keys()):
763 print " - %s [%s]: %s" % (pool.solvid2str(solvid), pool.solvables[solvid].repo.name, pool.lookup_str(solvid, solv.SOLVABLE_SUMMARY))
767 if cmd == 'list' or cmd == 'info' or cmd == 'install':
769 if arg.endswith(".rpm") and os.access(arg, os.R_OK):
771 cmdlinerepo = cmdline_repo({ 'alias': '@commandline', 'type': 'commandline' })
772 cmdlinerepo.load(pool)
773 cmdlinerepo['packages'] = {}
774 cmdlinerepo['packages'][arg] = cmdlinerepo['handle'].add_rpm(arg, Repo.REPO_REUSE_REPODATA|Repo.REPO_NO_INTERNALIZE)
776 cmdlinerepo['handle'].internalize()
778 addedprovides = pool.addfileprovides_ids()
780 sysrepo.updateaddedprovides(addedprovides)
782 repo.updateaddedprovides(addedprovides)
784 pool.createwhatprovides()
786 # convert arguments into jobs
789 if cmdlinerepo and arg in cmdlinerepo['packages']:
790 jobs.append(pool.Job(Job.SOLVER_SOLVABLE, cmdlinerepo['packages'][arg]))
792 njobs = mkjobs(pool, cmd, arg)
794 print "nothing matches '%s'" % arg
798 if cmd == 'list' or cmd == 'info':
800 print "no package matched."
803 for s in pool.jobsolvables(job):
805 print "Name: %s" % s.str()
806 print "Repo: %s" % s.repo.name
807 print "Summary: %s" % s.lookup_str(solv.SOLVABLE_SUMMARY)
808 str = s.lookup_str(solv.SOLVABLE_URL)
810 print "Url: %s" % str
811 str = s.lookup_str(solv.SOLVABLE_LICENSE)
813 print "License: %s" % str
814 print "Description:\n%s" % s.lookup_str(solv.SOLVABLE_DESCRIPTION)
817 print " - %s [%s]" % (s.str(), s.repo.name)
818 print " %s" % s.lookup_str(solv.SOLVABLE_SUMMARY)
821 if cmd == 'install' or cmd == 'erase' or cmd == 'up' or cmd == 'dup' or cmd == 'verify':
823 if cmd == 'up' or cmd == 'verify':
824 jobs = [ pool.Job(Job.SOLVER_SOLVABLE_ALL, 0) ]
828 print "no package matched."
832 # up magic: use install instead of update if no installed package matches
833 if job.how == Job.SOLVER_SOLVABLE_ALL or filter(lambda s: s.isinstalled(), pool.jobsolvables(job)):
834 job.how |= Job.SOLVER_UPDATE
836 job.how |= Job.SOLVER_INSTALL
837 elif cmd == 'install':
838 job.how |= Job.SOLVER_INSTALL
840 job.how |= Job.SOLVER_ERASE
842 job.how |= Job.SOLVER_DISTUPGRADE
843 elif cmd == 'verify':
844 job.how |= Job.SOLVER_VERIFY
846 #pool.set_debuglevel(2)
849 solver = pool.create_solver()
850 solver.ignorealreadyrecommended = True
852 solver.allowuninstall = True
853 if cmd == 'dup' and not jobs:
854 solver.distupgrade = True
855 solver.updatesystem = True
856 solver.allowdowngrade = True
857 solver.allowvendorchange = True
858 solver.allowarchchange = True
859 solver.dosplitprovides = True
860 if cmd == 'up' and len(jobs) == 1 and jobs[0].how == (Job.SOLVER_UPDATE | Job.SOLVER_SOLVABLE_ALL):
861 solver.dosplitprovides = True
862 problems = solver.solve(jobs)
865 for problem in problems:
866 print "Problem %d:" % problem.id
867 r = problem.findproblemrule()
869 print ri.problemstr()
870 solutions = problem.solutions()
871 for solution in solutions:
872 print " Solution %d:" % solution.id
873 elements = solution.elements()
874 for element in elements:
876 if etype == Solver.SOLVER_SOLUTION_JOB:
877 print " - do not ask to", jobs[element.jobidx].str()
878 elif etype == Solver.SOLVER_SOLUTION_INFARCH:
879 if element.solvable.isinstalled():
880 print " - keep %s despite the inferior architecture" % element.solvable.str()
882 print " - install %s despite the inferior architecture" % element.solvable.str()
883 elif etype == Solver.SOLVER_SOLUTION_DISTUPGRADE:
884 if element.solvable.isinstalled():
885 print " - keep obsolete %s" % element.solvable.str()
887 print " - install %s from excluded repository" % element.solvable.str()
888 elif etype == Solver.SOLVER_SOLUTION_REPLACE:
889 illegal = element.illegalreplace()
890 if illegal & solver.POLICY_ILLEGAL_DOWNGRADE:
891 print " - allow downgrade of %s to %s" % (element.solvable.str(), element.replacement.str())
892 if illegal & solver.POLICY_ILLEGAL_ARCHCHANGE:
893 print " - allow architecture change of %s to %s" % (element.solvable.str(), element.replacement.str())
894 if illegal & solver.POLICY_ILLEGAL_VENDORCHANGE:
895 if element.replacement.vendorid:
896 print " - allow vendor change from '%s' (%s) to '%s' (%s)" % (element.solvable.vendor, element.solvable.str(), element.replacement.vendor, element.replacement.str())
898 print " - allow vendor change from '%s' (%s) to no vendor (%s)" % (element.solvable.vendor, element.solvable.str(), element.replacement.str())
900 print " - allow replacement of %s with %s" % (element.solvable.str(), element.replacement.str())
901 elif etype == Solver.SOLVER_SOLUTION_ERASE:
902 print " - allow deinstallation of %s" % element.solvable.str()
904 while not (sol == 's' or sol == 'q' or (sol.isdigit() and int(sol) >= 1 and int(sol) <= len(solutions))):
905 sys.stdout.write("Please choose a solution: ")
907 sol = sys.stdin.readline().strip()
909 continue # skip problem
912 solution = solutions[int(sol) - 1]
913 for element in solution.elements():
915 if etype == Solver.SOLVER_SOLUTION_JOB:
916 jobs[element.jobidx] = pool.Job(Job.SOLVER_NOOP, 0)
918 newjob = element.Job()
921 if job.how == newjob.how and job.what == newjob.what:
926 # no problems, show transaction
927 trans = solver.transaction()
930 print "Nothing to do."
933 print "Transaction summary:"
935 for ctype, pkgs, fromid, toid in trans.classify():
936 if ctype == Transaction.SOLVER_TRANSACTION_ERASE:
937 print "%d erased packages:" % len(pkgs)
938 elif ctype == Transaction.SOLVER_TRANSACTION_INSTALL:
939 print "%d installed packages:" % len(pkgs)
940 elif ctype == Transaction.SOLVER_TRANSACTION_REINSTALLED:
941 print "%d reinstalled packages:" % len(pkgs)
942 elif ctype == Transaction.SOLVER_TRANSACTION_DOWNGRADED:
943 print "%d downgraded packages:" % len(pkgs)
944 elif ctype == Transaction.SOLVER_TRANSACTION_CHANGED:
945 print "%d changed packages:" % len(pkgs)
946 elif ctype == Transaction.SOLVER_TRANSACTION_UPGRADED:
947 print "%d upgraded packages:" % len(pkgs)
948 elif ctype == Transaction.SOLVER_TRANSACTION_VENDORCHANGE:
949 print "%d vendor changes from '%s' to '%s':" % (len(pkgs), pool.id2str(fromid), pool.id2str(toid))
950 elif ctype == Transaction.SOLVER_TRANSACTION_ARCHCHANGE:
951 print "%d arch changes from '%s' to '%s':" % (len(pkgs), pool.id2str(fromid), pool.id2str(toid))
955 if ctype == Transaction.SOLVER_TRANSACTION_UPGRADED or ctype == Transaction.SOLVER_TRANSACTION_DOWNGRADED:
956 op = trans.othersolvable(p)
957 print " - %s -> %s" % (p.str(), op.str())
959 print " - %s" % p.str()
961 print "install size change: %d K" % trans.calc_installsizechange()
966 sys.stdout.write("OK to continue (y/n)? ")
968 yn = sys.stdin.readline().strip()
970 if yn == 'n': sys.exit(1)
971 newpkgs = trans.newpackages()
976 downloadsize += p.lookup_num(solv.SOLVABLE_DOWNLOADSIZE)
977 print "Downloading %d packages, %d K" % (len(newpkgs), downloadsize)
979 repo = p.repo.appdata
980 location, medianr = p.lookup_location()
983 if repo['type'] == 'commandline':
984 f = solv.xfopen(location)
986 sys.exit("\n%s: %s not found" % location)
989 if sysrepo['handle'].nsolvables and os.access('/usr/bin/applydeltarpm', os.X_OK):
991 di = p.repo.dataiterator_new(solv.SOLVID_META, solv.DELTA_PACKAGE_NAME, pname, Dataiterator.SEARCH_STRING)
992 di.prepend_keyname(solv.REPOSITORY_DELTAINFO)
995 if pool.lookup_id(solv.SOLVID_POS, solv.DELTA_PACKAGE_EVR) != p.evrid or pool.lookup_id(solv.SOLVID_POS, solv.DELTA_PACKAGE_ARCH) != p.archid:
997 baseevrid = pool.lookup_id(solv.SOLVID_POS, solv.DELTA_BASE_EVR)
999 for installedp in pool.providers(p.nameid):
1000 if installedp.isinstalled() and installedp.nameid == p.nameid and installedp.archid == p.archid and installedp.evrid == baseevrid:
1001 candidate = installedp
1004 seq = pool.lookup_str(solv.SOLVID_POS, solv.DELTA_SEQ_NAME) + '-' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_SEQ_EVR) + '-' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_SEQ_NUM)
1005 st = subprocess.call(['/usr/bin/applydeltarpm', '-a', p.arch, '-c', '-s', seq])
1008 chksum = pool.lookup_checksum(solv.SOLVID_POS, solv.DELTA_CHECKSUM)
1011 dloc = pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_DIR) + '/' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_NAME) + '-' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_EVR) + '.' + pool.lookup_str(solv.SOLVID_POS, solv.DELTA_LOCATION_SUFFIX)
1012 f = repo.download(dloc, False, chksum)
1015 nf = tempfile.TemporaryFile()
1016 nf = os.dup(nf.fileno())
1017 st = subprocess.call(['/usr/bin/applydeltarpm', '-a', p.arch, "/dev/fd/%d" % solv.xfileno(f), "/dev/fd/%d" % nf])
1019 os.lseek(nf, 0, os.SEEK_SET)
1020 newpkgsfp[p.id] = solv.xfopen_fd("", nf)
1022 if p.id in newpkgsfp:
1023 sys.stdout.write("d")
1027 if repo['type'] == 'yast2':
1028 datadir = repo['handle'].lookup_str(solv.SOLVID_META, solv.SUSETAGS_DATADIR)
1031 location = datadir + '/' + location
1032 chksum = p.lookup_checksum(solv.SOLVABLE_CHECKSUM)
1033 f = repo.download(location, False, chksum)
1035 sys.exit("\n%s: %s not found in repository" % (repo['alias'], location))
1037 sys.stdout.write(".")
1040 print "Committing transaction:"
1042 ts = rpm.TransactionSet('/')
1043 ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
1044 erasenamehelper = {}
1045 for p in trans.steps():
1046 type = trans.steptype(p, Transaction.SOLVER_TRANSACTION_RPM_ONLY)
1047 if type == Transaction.SOLVER_TRANSACTION_ERASE:
1048 rpmdbid = p.lookup_num(solv.RPM_RPMDBID)
1049 erasenamehelper[p.name] = p
1051 sys.exit("\ninternal error: installed package %s has no rpmdbid\n" % p.str())
1052 ts.addErase(rpmdbid)
1053 elif type == Transaction.SOLVER_TRANSACTION_INSTALL:
1055 h = ts.hdrFromFdno(solv.xfileno(f))
1056 os.lseek(solv.xfileno(f), 0, os.SEEK_SET)
1057 ts.addInstall(h, p, 'u')
1058 elif type == Transaction.SOLVER_TRANSACTION_MULTIINSTALL:
1060 h = ts.hdrFromFdno(solv.xfileno(f))
1061 os.lseek(solv.xfileno(f), 0, os.SEEK_SET)
1062 ts.addInstall(h, p, 'i')
1063 checkproblems = ts.check()
1068 def runCallback(reason, amount, total, p, d):
1069 if reason == rpm.RPMCALLBACK_INST_OPEN_FILE:
1070 return solv.xfileno(newpkgsfp[p.id])
1071 if reason == rpm.RPMCALLBACK_INST_START:
1072 print "install", p.str()
1073 if reason == rpm.RPMCALLBACK_UNINST_START:
1074 # argh, p is just the name of the package
1075 if p in erasenamehelper:
1076 p = erasenamehelper[p]
1077 print "erase", p.str()
1078 runproblems = ts.run(runCallback, '')
1084 print "unknown command", cmd