From: Ville Skyttä
Date: Fri, 12 Feb 2010 21:38:13 +0000 (+0200)
Subject: Trim trailing whitespace.
X-Git-Tag: upstream/0.9.9~26^2
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=ce9638466182349019edaeea896bd9a5909bb4ef;p=tools%2Fcreaterepo.git

Trim trailing whitespace.
---

diff --git a/createrepo/__init__.py b/createrepo/__init__.py
index 0c0c7d8..d29c9d8 100644
--- a/createrepo/__init__.py
+++ b/createrepo/__init__.py
@@ -11,7 +11,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-# Copyright 2009 Red Hat, Inc -
+# Copyright 2009 Red Hat, Inc -
 # written by seth vidal skvidal at fedoraproject.org

 import os
@@ -61,21 +61,21 @@ class MetaDataConfig(object):
         self.groupfile = None
         self.sumtype = 'sha256'
         self.pretty = False
-        self.cachedir = None
+        self.cachedir = None
         self.use_cache = False
         self.basedir = os.getcwd()
         self.checkts = False
-        self.split = False
+        self.split = False
         self.update = False
         self.deltas = False # do the deltarpm thing
         # where to put the .drpms - defaults to 'drpms' inside 'repodata'
-        self.deltadir = None
+        self.deltadir = None
         self.delta_relative = 'drpms/'
-        self.oldpackage_paths = [] # where to look for the old packages -
+        self.oldpackage_paths = [] # where to look for the old packages -
         self.deltafile = 'prestodelta.xml.gz'
         self.num_deltas = 1 # number of older versions to delta (max)
         self.max_delta_rpm_size = 100000000
-        self.update_md_path = None
+        self.update_md_path = None
         self.skip_stat = False
         self.database = False
         self.outputdir = None
@@ -102,20 +102,20 @@ class MetaDataConfig(object):
         self.distro_tags = []# [(cpeid(None allowed), human-readable-string)]
         self.read_pkgs_list = None # filepath/name to write out list of pkgs
                                    # read in this run of createrepo
-
+

 class SimpleMDCallBack(object):
     def errorlog(self, thing):
         print >> sys.stderr, thing
-
+
     def log(self, thing):
         print thing
-
+
     def progress(self, item, current, total):
         sys.stdout.write('\r' + ' ' * 80)
         sys.stdout.write("\r%d/%d - %s" % (current, total, item))
         sys.stdout.flush()
-
-
+
+
 class MetaDataGenerator:
     def __init__(self, config_obj=None, callback=None):
         self.conf = config_obj
@@ -124,31 +124,31 @@ class MetaDataGenerator:
         if not callback:
             self.callback = SimpleMDCallBack()
         else:
-            self.callback = callback
-
-
+            self.callback = callback
+
+
         self.ts = rpmUtils.transaction.initReadOnlyTransaction()
         self.pkgcount = 0
         self.current_pkg = 0
         self.files = []
         self.rpmlib_reqs = {}
         self.read_pkgs = []
-
+
         if not self.conf.directory and not self.conf.directories:
             raise MDError, "No directory given on which to run."
-
+
         if not self.conf.directories: # just makes things easier later
             self.conf.directories = [self.conf.directory]
         if not self.conf.directory: # ensure we have both in the config object
             self.conf.directory = self.conf.directories[0]
-
+
         # the cachedir thing:
         if self.conf.cachedir:
             self.conf.use_cache = True
-
+
         # this does the dir setup we need done
         self._parse_directory()
-        self._test_setup_dirs()
+        self._test_setup_dirs()

     def _parse_directory(self):
         """pick up the first directory given to us and make sure we know
@@ -160,11 +160,11 @@ class MetaDataGenerator:
             self.conf.basedir = os.path.realpath(self.conf.basedir)
             self.conf.relative_dir = self.conf.directory

-        self.package_dir = os.path.join(self.conf.basedir,
+        self.package_dir = os.path.join(self.conf.basedir,
                                         self.conf.relative_dir)
-
+
         if not self.conf.outputdir:
-            self.conf.outputdir = os.path.join(self.conf.basedir,
+            self.conf.outputdir = os.path.join(self.conf.basedir,
                                                self.conf.relative_dir)

     def _test_setup_dirs(self):
@@ -196,7 +196,7 @@ class MetaDataGenerator:
                 raise MDError, _('Cannot create/verify %s') % temp_final

         if self.conf.deltas:
-            temp_delta = os.path.join(self.conf.outputdir,
+            temp_delta = os.path.join(self.conf.outputdir,
                                       self.conf.delta_relative)
             if not checkAndMakeDir(temp_delta):
                 raise MDError, _('Cannot create/verify %s') % temp_delta
@@ -212,7 +212,7 @@ class MetaDataGenerator:
             direcs.append('deltadir')

         for direc in direcs:
-            filepath = os.path.join(self.conf.outputdir, getattr(self.conf,
+            filepath = os.path.join(self.conf.outputdir, getattr(self.conf,
                                                                  direc))
             if os.path.exists(filepath):
                 if not os.access(filepath, os.W_OK):
@@ -287,9 +287,9 @@ class MetaDataGenerator:
     def errorlog(self, thing):
         """subclass this if you want something different...."""
         errorprint(thing)
-
+
     def checkTimeStamps(self):
-        """check the timestamp of our target dir. If it is not newer than
+        """check the timestamp of our target dir. If it is not newer than
           the repodata return False, else True"""
        if self.conf.checkts:
            dn = os.path.join(self.conf.basedir, self.conf.directory)
@@ -301,9 +301,9 @@ class MetaDataGenerator:
                    self.callback.errorlog(_('cannot get to file: %s') % fn)
                if os.path.getctime(fn) > self.conf.mdtimestamp:
                    return False
-
+
            return True
-
+
        return False

    def trimRpms(self, files):
@@ -346,22 +346,22 @@ class MetaDataGenerator:
    def _setup_grabber(self):
        if not hasattr(self, '_grabber'):
            self._grabber = grabber.URLGrabber()
-
+
        return self._grabber

    grabber = property(fget = lambda self: self._setup_grabber())
-
-
+
+
    def doPkgMetadata(self):
        """all the heavy lifting for the package metadata"""
        if self.conf.update:
-            self._setup_old_metadata_lookup()
+            self._setup_old_metadata_lookup()

        # rpms we're going to be dealing with
        if self.conf.pkglist:
            packages = self.conf.pkglist
        else:
            packages = self.getFileList(self.package_dir, '.rpm')
-
+
        if not isinstance(packages, MetaSack):
            packages = self.trimRpms(packages)
        self.pkgcount = len(packages)
@@ -371,8 +371,8 @@ class MetaDataGenerator:
            self.closeMetadataDocs()
        except (IOError, OSError), e:
            raise MDError, _('Cannot access/write repodata files: %s') % e
-
-
+
+
    def openMetadataDocs(self):
        if self.conf.database_only:
            self.setup_sqlite_dbs()
@@ -385,7 +385,7 @@ class MetaDataGenerator:

    def _setupPrimary(self):
        # setup the primary metadata file
-        primaryfilepath = os.path.join(self.conf.outputdir, self.conf.tempdir,
+        primaryfilepath = os.path.join(self.conf.outputdir, self.conf.tempdir,
                                       self.conf.primaryfile)
        fo = _gzipOpen(primaryfilepath, 'w')
        fo.write('\n')
@@ -396,17 +396,17 @@ class MetaDataGenerator:

    def _setupFilelists(self):
        # setup the filelist file
-        filelistpath = os.path.join(self.conf.outputdir, self.conf.tempdir,
+        filelistpath = os.path.join(self.conf.outputdir, self.conf.tempdir,
                                    self.conf.filelistsfile)
        fo = _gzipOpen(filelistpath, 'w')
        fo.write('\n')
        fo.write('' % self.pkgcount)
        return fo
-
+
    def _setupOther(self):
        # setup the other file
-        otherfilepath = os.path.join(self.conf.outputdir, self.conf.tempdir,
+        otherfilepath = os.path.join(self.conf.outputdir, self.conf.tempdir,
                                     self.conf.otherfile)
        fo = _gzipOpen(otherfilepath, 'w')
        fo.write('\n')
@@ -417,13 +417,13 @@ class MetaDataGenerator:

    def _setupDelta(self):
        # setup the other file
-        deltafilepath = os.path.join(self.conf.outputdir, self.conf.tempdir,
+        deltafilepath = os.path.join(self.conf.outputdir, self.conf.tempdir,
                                     self.conf.deltafile)
        fo = _gzipOpen(deltafilepath, 'w')
        fo.write('\n')
        fo.write('\n')
        return fo
-
+
    def read_in_package(self, rpmfile, pkgpath=None, reldir=None):
        """rpmfile == relative path to file from self.packge_dir"""

        if not rpmfile.strip():
            raise MDError, "Blank filename passed in, skipping"
-
+
        if rpmfile.find("://") != -1:
-
+
            if not hasattr(self, 'tempdir'):
                self.tempdir = tempfile.mkdtemp()
-
+
            pkgname = os.path.basename(rpmfile)
            baseurl = os.path.dirname(rpmfile)
-            reldir = self.tempdir
+            reldir = self.tempdir
            dest = os.path.join(self.tempdir, pkgname)
            if not self.conf.quiet:
                self.callback.log('\nDownloading %s' % rpmfile)
@@ -452,12 +452,12 @@ class MetaDataGenerator:
                raise MDError, "Unable to retrieve remote package %s: %s" % (
                               rpmfile, e)

-
+
        else:
            rpmfile = '%s/%s' % (pkgpath, rpmfile)
-
+
        try:
-            po = yumbased.CreateRepoPackage(self.ts, rpmfile,
+            po = yumbased.CreateRepoPackage(self.ts, rpmfile,
                                            sumtype=self.conf.sumtype)
        except Errors.MiscError, e:
            raise MDError, "Unable to open package: %s" % e
@@ -469,17 +469,17 @@ class MetaDataGenerator:
        for r in po.requires_print:
            if r.startswith('rpmlib('):
                self.rpmlib_reqs[r] = 1
-
+
        if po.checksum in (None, ""):
            raise MDError, "No Package ID found for package %s, not going to" \
                           " add it" % po
-
+
        return po

    def writeMetadataDocs(self, pkglist=[], pkgpath=None):

        if not pkglist:
-            pkglist = self.conf.pkglist
+            pkglist = self.conf.pkglist

        if not pkgpath:
            directory = self.conf.directory
@@ -489,7 +489,7 @@ class MetaDataGenerator:
        for pkg in pkglist:
            self.current_pkg += 1
            recycled = False
-
+
            # look to see if we can get the data from the old repodata
            # if so write this one out that way
            if self.conf.update:
@@ -501,21 +501,21 @@ class MetaDataGenerator:
                nodes = self.oldData.getNodes(old_pkg)
                if nodes is not None:
                    recycled = True
-
+
            # FIXME also open up the delta file
-
+
            # otherwise do it individually
            if not recycled:
                #scan rpm files
-                if not pkgpath:
+                if not pkgpath:
                    reldir = os.path.join(self.conf.basedir, directory)
                else:
                    reldir = pkgpath
-
+
                if not isinstance(pkg, YumAvailablePackage):
                    try:
-                        po = self.read_in_package(pkg, pkgpath=pkgpath,
+                        po = self.read_in_package(pkg, pkgpath=pkgpath,
                                                  reldir=reldir)
                    except MDError, e:
                        # need to say something here
@@ -525,11 +525,11 @@ class MetaDataGenerator:
                    if self.conf.deltas:
                        self._do_delta_rpm_package(po)
                    self.read_pkgs.append(pkg)
-
+
                else:
                    po = pkg
                    self.read_pkgs.append(po.localpath)
-
+
                if self.conf.database_only:
                    pass # disabled right now for sanity reasons (mine)
                    #po.do_sqlite_dump(self.md_sqlite)
@@ -540,9 +540,9 @@ class MetaDataGenerator:
                                 clog_limit=self.conf.changelog_limit))
            else:
                if self.conf.verbose:
-                    self.callback.log(_("Using data from old metadata for %s")
+                    self.callback.log(_("Using data from old metadata for %s")
                                      % pkg)
-                (primarynode, filenode, othernode) = nodes
+                (primarynode, filenode, othernode) = nodes

                for node, outfile in ((primarynode, self.primaryfile),
                                      (filenode, self.flfile),
@@ -576,7 +576,7 @@ class MetaDataGenerator:

        if not self.conf.quiet:
            if self.conf.verbose:
-                self.callback.log('%d/%d - %s' % (self.current_pkg,
+                self.callback.log('%d/%d - %s' % (self.current_pkg,
                                                  self.pkgcount, pkg))
            else:
                self.callback.progress(pkg, self.current_pkg, self.pkgcount)
@@ -588,7 +588,7 @@ class MetaDataGenerator:

        if not self.conf.quiet:
            self.callback.log('')
-
+
        # save them up to the tmp locations:
        if not self.conf.quiet:
            self.callback.log(_('Saving Primary metadata'))
@@ -631,14 +631,14 @@ class MetaDataGenerator:
        drpm_pkg_time = time.time()
        # duck and cover if the pkg.size is > whatever
        if int(pkg.size) > self.conf.max_delta_rpm_size:
-            if not self.conf.quiet:
+            if not self.conf.quiet:
                self.callback.log("Skipping %s package " \
                                  "that is > max_delta_rpm_size" % pkg)
            return

        # generate a list of all the potential 'old rpms'
        opd = self._get_old_package_dict()
-        # for each of our old_package_paths -
+        # for each of our old_package_paths -
        # make a drpm from the newest of that pkg
        # get list of potential candidates which are likely to match
        for d in self.conf.oldpackage_paths:
@@ -648,11 +648,11 @@ class MetaDataGenerator:
            for fn in opd[d]:
                if os.path.basename(fn).startswith(pkg.name):
                    pot_cand.append(fn)
-
+
            candidates = []
            for fn in pot_cand:
                try:
-                    thispo = yumbased.CreateRepoPackage(self.ts, fn,
+                    thispo = yumbased.CreateRepoPackage(self.ts, fn,
                                                        sumtype=self.conf.sumtype)
                except Errors.MiscError, e:
                    continue
@@ -675,13 +675,13 @@ class MetaDataGenerator:
                    self.callback.log('created drpm from %s to %s: %s in %0.3f' % (
                                      delta_p, pkg, drpmfn, (time.time() - dt_st)))
        if self.conf.profile:
-            self.callback.log('total drpm time for %s: %0.3f' % (pkg,
+            self.callback.log('total drpm time for %s: %0.3f' % (pkg,
                              (time.time() - drpm_pkg_time)))

    def _get_old_package_dict(self):
        if hasattr(self, '_old_package_dict'):
            return self._old_package_dict
-
+
        self._old_package_dict = {}
        opl = []
        for d in self.conf.oldpackage_paths:
@@ -695,7 +695,7 @@
                if not self._old_package_dict.has_key(d):
                    self._old_package_dict[d] = []
                self._old_package_dict[d].append(d + '/' + f)
-
+
        return self._old_package_dict

    def generate_delta_xml(self):
@@ -709,17 +709,17 @@ class MetaDataGenerator:
        targets = {}
        results = []
        for drpm_fn in self.getFileList(self.conf.deltadir, 'drpm'):
-            drpm_rel_fn = os.path.normpath(self.conf.delta_relative +
+            drpm_rel_fn = os.path.normpath(self.conf.delta_relative +
                                           '/' + drpm_fn) # this is annoying
-            drpm_po = yumbased.CreateRepoPackage(self.ts,
+            drpm_po = yumbased.CreateRepoPackage(self.ts,
                          self.conf.deltadir + '/' + drpm_fn, sumtype=self.conf.sumtype)
-
-            drpm = deltarpms.DeltaRPMPackage(drpm_po, self.conf.outputdir,
+
+            drpm = deltarpms.DeltaRPMPackage(drpm_po, self.conf.outputdir,
                                             drpm_rel_fn)
            if not targets.has_key(drpm_po.pkgtup):
                targets[drpm_po.pkgtup] = []
            targets[drpm_po.pkgtup].append(drpm.xml_dump_metadata())
-
+
        for (n, a, e, v, r) in targets.keys():
            results.append("""   \n""" % (
                           n, e, v, r, a))
@@ -731,12 +731,12 @@ class MetaDataGenerator:

        return ' '.join(results)

-    def addArbitraryMetadata(self, mdfile, mdtype, xml_node, compress=True,
+    def addArbitraryMetadata(self, mdfile, mdtype, xml_node, compress=True,
                             compress_type='gzip', attribs={}):
        """add random metadata to the repodata dir and repomd.xml
           mdfile = complete path to file
           mdtype = the metadata type to use
-           xml_node = the node of the repomd xml object to append this
+           xml_node = the node of the repomd xml object to append this
                      data onto
           compress = compress the file before including it
        """
@@ -756,23 +756,23 @@ class MetaDataGenerator:
        else:
            outfn = os.path.join(outdir, sfile)
            output = open(outfn, 'w')
-
+
            output.write(fo.read())
            output.close()
            fo.seek(0)
            open_csum = misc.checksum(self.conf.sumtype, fo)
            fo.close()
-
+
        if self.conf.unique_md_filenames:
            (csum, outfn) = checksum_and_rename(outfn, self.conf.sumtype)
            sfile = os.path.basename(outfn)
        else:
            if compress:
-                csum = misc.checksum(self.conf.sumtype, outfn)
+                csum = misc.checksum(self.conf.sumtype, outfn)
            else:
                csum = open_csum
-
+
        timest = os.stat(outfn)[8]

        # add all this garbage into the xml node like:
@@ -793,19 +793,19 @@ class MetaDataGenerator:
        # add the random stuff
        for (k, v) in attribs.items():
            data.newChild(None, k, str(v))
-
-
+
+
    def doRepoMetadata(self):
-        """wrapper to generate the repomd.xml file that stores the info
+        """wrapper to generate the repomd.xml file that stores the info
           on the other files"""
        repodoc = libxml2.newDoc("1.0")
        reporoot = repodoc.newChild(None, "repomd", None)
        repons = reporoot.newNs('http://linux.duke.edu/metadata/repo', None)
        reporoot.setNs(repons)
-        rpmns = reporoot.newNs("http://linux.duke.edu/metadata/rpm", 'rpm')
+        rpmns = reporoot.newNs("http://linux.duke.edu/metadata/rpm", 'rpm')
        repopath = os.path.join(self.conf.outputdir, self.conf.tempdir)
        repofilepath = os.path.join(repopath, self.conf.repomdfile)
-
+
        revision = reporoot.newChild(None, 'revision', self.conf.revision)
        if self.conf.content_tags or self.conf.distro_tags:
            tags = reporoot.newChild(None, 'tags', None)
@@ -822,14 +822,14 @@ class MetaDataGenerator:
            db_workfiles = [(self.md_sqlite.pri_sqlite_file, 'primary_db'),
                            (self.md_sqlite.file_sqlite_file, 'filelists_db'),
                            (self.md_sqlite.other_sqlite_file, 'other_db')]
-            dbversion = '10'
+            dbversion = '10'
        else:
            workfiles = [(self.conf.otherfile, 'other',),
                         (self.conf.filelistsfile, 'filelists'),
                         (self.conf.primaryfile, 'primary')]
            db_workfiles = []
            repoid = 'garbageid'
-
+
        if self.conf.deltas:
            workfiles.append((self.conf.deltafile, 'prestodelta'))
        if self.conf.database:
@@ -842,7 +842,7 @@ class MetaDataGenerator:

        for (rpm_file, ftype) in workfiles:
            complete_path = os.path.join(repopath, rpm_file)
-
+
            zfo = _gzipOpen(complete_path)
            # This is misc.checksum() done locally so we can get the size too.
            data = misc.Checksums([sumtype])
@@ -856,28 +856,28 @@ class MetaDataGenerator:

            db_csums = {}
            db_compressed_sums = {}
-
+
            if self.conf.database:
                if ftype in ['primary', 'filelists', 'other']:
                    if self.conf.verbose:
-                        self.callback.log("Starting %s db creation: %s" % (ftype,
+                        self.callback.log("Starting %s db creation: %s" % (ftype,
                                          time.ctime()))
-
+
                if ftype == 'primary':
                    rp.getPrimary(complete_path, csum)
-
+
                elif ftype == 'filelists':
                    rp.getFilelists(complete_path, csum)
-
+
                elif ftype == 'other':
                    rp.getOtherdata(complete_path, csum)
-
-                if ftype in ['primary', 'filelists', 'other']:
+
+                if ftype in ['primary', 'filelists', 'other']:
                    tmp_result_name = '%s.xml.gz.sqlite' % ftype
                    tmp_result_path = os.path.join(repopath, tmp_result_name)
                    good_name = '%s.sqlite' % ftype
                    resultpath = os.path.join(repopath, good_name)
-
+
                    # rename from silly name to not silly name
                    os.rename(tmp_result_path, resultpath)
                    compressed_name = '%s.bz2' % good_name
@@ -887,7 +887,7 @@ class MetaDataGenerator:
                    # compress the files
                    bzipFile(resultpath, result_compressed)
                    # csum the compressed file
-                    db_compressed_sums[ftype] = misc.checksum(sumtype,
+                    db_compressed_sums[ftype] = misc.checksum(sumtype,
                                                              result_compressed)
                    # timestamp+size the uncompressed file
                    un_stat = os.stat(resultpath)
@@ -897,44 +897,44 @@ class MetaDataGenerator:
                    if self.conf.unique_md_filenames:
                        csum_compressed_name = '%s-%s.bz2' % (
                                           db_compressed_sums[ftype], good_name)
-                        csum_result_compressed = os.path.join(repopath,
+                        csum_result_compressed = os.path.join(repopath,
                                                           csum_compressed_name)
                        os.rename(result_compressed, csum_result_compressed)
                        result_compressed = csum_result_compressed
                        compressed_name = csum_compressed_name
-
+
                    # timestamp+size the compressed file
                    db_stat = os.stat(result_compressed)
-
+
                    # add this data as a section to the repomdxml
                    db_data_type = '%s_db' % ftype
                    data = reporoot.newChild(None, 'data', None)
                    data.newProp('type', db_data_type)
                    location = data.newChild(None, 'location', None)
-
+
                    if self.conf.baseurl is not None:
                        location.newProp('xml:base', self.conf.baseurl)
-
-                    location.newProp('href', os.path.join(self.conf.finaldir,
+
+                    location.newProp('href', os.path.join(self.conf.finaldir,
                                                          compressed_name))
-                    checksum = data.newChild(None, 'checksum',
+                    checksum = data.newChild(None, 'checksum',
                                             db_compressed_sums[ftype])
                    checksum.newProp('type', sumtype)
-                    db_tstamp = data.newChild(None, 'timestamp',
+                    db_tstamp = data.newChild(None, 'timestamp',
                                              str(db_stat.st_mtime))
                    data.newChild(None, 'size', str(db_stat.st_size))
                    data.newChild(None, 'open-size', str(un_stat.st_size))
-                    unchecksum = data.newChild(None, 'open-checksum',
+                    unchecksum = data.newChild(None, 'open-checksum',
                                              db_csums[ftype])
                    unchecksum.newProp('type', sumtype)
-                    database_version = data.newChild(None, 'database_version',
+                    database_version = data.newChild(None, 'database_version',
                                                     dbversion)
                    if self.conf.verbose:
self.callback.log("Ending %s db creation: %s" % (ftype, + self.callback.log("Ending %s db creation: %s" % (ftype, time.ctime())) - - + + data = reporoot.newChild(None, 'data', None) data.newProp('type', ftype) @@ -954,33 +954,33 @@ class MetaDataGenerator: orig_file = os.path.join(repopath, rpm_file) dest_file = os.path.join(repopath, res_file) os.rename(orig_file, dest_file) - + else: res_file = rpm_file - rpm_file = res_file - + rpm_file = res_file + location.newProp('href', os.path.join(self.conf.finaldir, rpm_file)) - if not self.conf.quiet and self.conf.database: + if not self.conf.quiet and self.conf.database: self.callback.log('Sqlite DBs complete') for (fn, ftype) in db_workfiles: attribs = {'database_version':dbversion} - self.addArbitraryMetadata(fn, ftype, reporoot, compress=True, + self.addArbitraryMetadata(fn, ftype, reporoot, compress=True, compress_type='bzip2', attribs=attribs) try: os.unlink(fn) except (IOError, OSError), e: pass - + if self.conf.groupfile is not None: self.addArbitraryMetadata(self.conf.groupfile, 'group_gz', reporoot) - self.addArbitraryMetadata(self.conf.groupfile, 'group', reporoot, + self.addArbitraryMetadata(self.conf.groupfile, 'group', reporoot, compress=False) - + if self.conf.additional_metadata: for md_type, mdfile in self.conf.additional_metadata.items(): self.addArbitraryMetadata(mdfile, md_type, reporoot) @@ -990,27 +990,27 @@ class MetaDataGenerator: # rpmlib = reporoot.newChild(rpmns, 'lib', None) # for r in self.rpmlib_reqs.keys(): # req = rpmlib.newChild(rpmns, 'requires', r) - - + + # save it down try: repodoc.saveFormatFileEnc(repofilepath, 'UTF-8', 1) except: self.callback.errorlog( _('Error saving temp file for repomd.xml: %s') % repofilepath) - raise MDError, 'Could not save temp file: %s' % repofilepath + raise MDError, 'Could not save temp file: %s' % repofilepath del repodoc def doFinalMove(self): """move the just-created repodata from .repodata to repodata - also make sure to preserve any files we didn't mess with in the + also make sure to preserve any files we didn't mess with in the metadata dir""" - - output_final_dir = os.path.join(self.conf.outputdir, self.conf.finaldir) + + output_final_dir = os.path.join(self.conf.outputdir, self.conf.finaldir) output_old_dir = os.path.join(self.conf.outputdir, self.conf.olddir) - + if os.path.exists(output_final_dir): try: os.rename(output_final_dir, output_old_dir) @@ -1027,7 +1027,7 @@ class MetaDataGenerator: os.rename(output_old_dir, output_final_dir) raise MDError, _('Error moving final metadata into place') - for f in ['primaryfile', 'filelistsfile', 'otherfile', 'repomdfile', + for f in ['primaryfile', 'filelistsfile', 'otherfile', 'repomdfile', 'groupfile']: if getattr(self.conf, f): fn = os.path.basename(getattr(self.conf, f)) @@ -1051,11 +1051,11 @@ class MetaDataGenerator: 'other.xml.gz','filelists.xml.gz'): os.remove(oldfile) # kill off the old ones continue - if f in ('filelists.sqlite.bz2', 'other.sqlite.bz2', + if f in ('filelists.sqlite.bz2', 'other.sqlite.bz2', 'primary.sqlite.bz2'): os.remove(oldfile) continue - + if os.path.exists(finalfile): # Hmph? Just leave it alone, then. 
                    try:
@@ -1077,11 +1077,11 @@ class MetaDataGenerator:
            try:
                os.rmdir(output_old_dir)
            except OSError, e:
-                self.errorlog(_('Could not remove old metadata dir: %s')
+                self.errorlog(_('Could not remove old metadata dir: %s')
                              % self.conf.olddir)
                self.errorlog(_('Error was %s') % e)
                self.errorlog(_('Please clean up this directory manually.'))
-
+
        # write out the read_pkgs_list file with self.read_pkgs
        if self.conf.read_pkgs_list:
            try:
@@ -1090,7 +1090,7 @@ class MetaDataGenerator:
                fo.flush()
                fo.close()
            except (OSError, IOError), e:
-                self.errorlog(_('Could not write out readpkgs list: %s')
+                self.errorlog(_('Could not write out readpkgs list: %s')
                              % self.conf.read_pkgs_list)
                self.errorlog(_('Error was %s') % e)
@@ -1102,9 +1102,9 @@ class MetaDataGenerator:
        except sqlite.OperationalError, e:
            raise MDError, _('Cannot create sqlite databases: %s.\n'\
                'Maybe you need to clean up a .repodata dir?') % e
-
-
-
+
+
+
class SplitMetaDataGenerator(MetaDataGenerator):
    """takes a series of dirs and creates repodata for all of them
       most commonly used with -u media:// - if no outputdir is specified
@@ -1112,7 +1112,7 @@ class SplitMetaDataGenerator(MetaDataGenerator):
    """
    def __init__(self, config_obj=None, callback=None):
        MetaDataGenerator.__init__(self, config_obj=config_obj, callback=None)
-
+
    def _getFragmentUrl(self, url, fragment):
        import urlparse
        urlparse.uses_fragment.append('media')
@@ -1147,7 +1147,7 @@ class SplitMetaDataGenerator(MetaDataGenerator):

        if self.conf.update:
            self._setup_old_metadata_lookup()
-
+
        filematrix = {}
        for mydir in self.conf.directories:
            if os.path.isabs(mydir):
@@ -1157,7 +1157,7 @@ class SplitMetaDataGenerator(MetaDataGenerator):
                thisdir = os.path.realpath(mydir)
            else:
                thisdir = os.path.join(self.conf.basedir, mydir)
-
+
            filematrix[mydir] = self.getFileList(thisdir, '.rpm')
            self.trimRpms(filematrix[mydir])
            self.pkgcount += len(filematrix[mydir])
@@ -1190,13 +1190,13 @@ class MetaDataSqlite(object):
        self.primary_cursor = self.pri_cx.cursor()

        self.filelists_cursor = self.file_cx.cursor()
-
+
        self.other_cursor = self.other_cx.cursor()
-
+
        self.create_primary_db()
        self.create_filelists_db()
        self.create_other_db()
-
+
    def create_primary_db(self):
        # make the tables
        schema = [
@@ -1218,17 +1218,17 @@ class MetaDataSqlite(object):
            """CREATE INDEX pkgrequires on requires (pkgKey);""",
            """CREATE INDEX providesname ON provides (name);""",
            """CREATE INDEX requiresname ON requires (name);""",
-            """CREATE TRIGGER removals AFTER DELETE ON packages
-                BEGIN
-                DELETE FROM files WHERE pkgKey = old.pkgKey;
-                DELETE FROM requires WHERE pkgKey = old.pkgKey;
-                DELETE FROM provides WHERE pkgKey = old.pkgKey;
-                DELETE FROM conflicts WHERE pkgKey = old.pkgKey;
+            """CREATE TRIGGER removals AFTER DELETE ON packages
+                BEGIN
+                DELETE FROM files WHERE pkgKey = old.pkgKey;
+                DELETE FROM requires WHERE pkgKey = old.pkgKey;
+                DELETE FROM provides WHERE pkgKey = old.pkgKey;
+                DELETE FROM conflicts WHERE pkgKey = old.pkgKey;
                DELETE FROM obsoletes WHERE pkgKey = old.pkgKey;
                END;""",
            """INSERT into db_info values (%s, 'direct_create');""" % sqlitecachec.DBVERSION,
            ]
-
+
        for cmd in schema:
            executeSQL(self.primary_cursor, cmd)
@@ -1242,15 +1242,15 @@ class MetaDataSqlite(object):
            """CREATE INDEX dirnames ON filelist (dirname);""",
            """CREATE INDEX keyfile ON filelist (pkgKey);""",
            """CREATE INDEX pkgId ON packages (pkgId);""",
-            """CREATE TRIGGER remove_filelist AFTER DELETE ON packages
-                BEGIN
-                DELETE FROM filelist WHERE pkgKey = old.pkgKey;
+            """CREATE TRIGGER remove_filelist AFTER DELETE ON packages
+                BEGIN
+                DELETE FROM filelist WHERE pkgKey = old.pkgKey;
                END;""",
-            """INSERT into db_info values (%s, 'direct_create');""" % sqlitecachec.DBVERSION,
+            """INSERT into db_info values (%s, 'direct_create');""" % sqlitecachec.DBVERSION,
            ]
        for cmd in schema:
            executeSQL(self.filelists_cursor, cmd)
-
+
    def create_other_db(self):
        schema = [
            """PRAGMA synchronous="OFF";""",
@@ -1260,13 +1260,12 @@ class MetaDataSqlite(object):
            """CREATE TABLE packages (  pkgKey INTEGER PRIMARY KEY, pkgId TEXT);""",
            """CREATE INDEX keychange ON changelog (pkgKey);""",
            """CREATE INDEX pkgId ON packages (pkgId);""",
-            """CREATE TRIGGER remove_changelogs AFTER DELETE ON packages
-                BEGIN
-                DELETE FROM changelog WHERE pkgKey = old.pkgKey;
+            """CREATE TRIGGER remove_changelogs AFTER DELETE ON packages
+                BEGIN
+                DELETE FROM changelog WHERE pkgKey = old.pkgKey;
                END;""",
-            """INSERT into db_info values (%s, 'direct_create');""" % sqlitecachec.DBVERSION,
+            """INSERT into db_info values (%s, 'direct_create');""" % sqlitecachec.DBVERSION,
            ]
-
+
        for cmd in schema:
            executeSQL(self.other_cursor, cmd)
-
diff --git a/createrepo/deltarpms.py b/createrepo/deltarpms.py
index d14d3fa..3edcbb5 100644
--- a/createrepo/deltarpms.py
+++ b/createrepo/deltarpms.py
@@ -47,7 +47,7 @@ class DeltaRPMPackage:
                del fo
                del fd
        self._getDRPMInfo(os.path.join(basedir, filename))
-
+
    def _stringToNEVR(self, string):
        i = string.rfind("-", 0, string.rfind("-")-1)
        name = string[:i]
@@ -60,13 +60,13 @@ class DeltaRPMPackage:
                length = length * 256
                length += ord(val)
        return length
-
+
    def _getDRPMInfo(self, filename):
        d = deltarpm.readDeltaRPM(filename)
        self.oldnevrstring = d['old_nevr']
        self.oldnevr = self._stringToNEVR(d['old_nevr'])
        self.sequence = d['seq']
-
+
    def _stringToVersion(self, strng):
        i = strng.find(':')
        if i != -1:
@@ -89,7 +89,7 @@ class DeltaRPMPackage:
        return (epoch, version, release)

    def xml_dump_metadata(self):
-        """takes an xml doc object and a package metadata entry node, populates a
+        """takes an xml doc object and a package metadata entry node, populates a
           package node with the md information"""

        (oldname, oldepoch, oldver, oldrel) = self.oldnevr
@@ -120,8 +120,5 @@ def create_drpm(old_pkg, new_pkg, destdir):
    if code:
        print "Error genDeltaRPM for %s: exitcode was %s - Reported Error: %s" % (old_pkg.name, code, out)
        return None
-
-    return delta_rpm_path
-
-
+    return delta_rpm_path
diff --git a/createrepo/merge.py b/createrepo/merge.py
index d4b8359..78514a3 100644
--- a/createrepo/merge.py
+++ b/createrepo/merge.py
@@ -70,7 +70,7 @@ class RepoMergeBase:
        for repo in repos:
            for pkg in repo.sack:
                others = self.yumbase.pkgSack.searchNevra(name=pkg.name, arch=pkg.arch)
-                # NOTE the above is definitely going to catch other versions which may
+                # NOTE the above is definitely going to catch other versions which may
                #      be an invalid comparison
                if len(others) > 1:
                    for thatpkg in others:
@@ -95,7 +95,7 @@ class RepoMergeBase:
        myrepos = self.yumbase.repos.listEnabled()

        self.sort_func(myrepos)
-
+
    def write_metadata(self, outputdir=None):
        mytempdir = tempfile.mkdtemp()
@@ -105,7 +105,7 @@ class RepoMergeBase:
            compsfile.write(self.yumbase.comps.xml())
            compsfile.close()
            self.mdconf.groupfile=comps_fn
-
+
        if self.updateinfo:
            ui_fn = mytempdir + '/updateinfo.xml'
            uifile = open(ui_fn, 'w')
@@ -114,7 +114,7 @@ class RepoMergeBase:
                try: # attempt to grab the updateinfo.xml.gz from the repodata
                    umd.add(repo)
                except yum.Errors.RepoMDError:
-                    continue
+                    continue
            umd.xml(fileobj=uifile)
            uifile.close()
            self.mdconf.additional_metadata['updateinfo'] = ui_fn
diff --git a/createrepo/readMetadata.py b/createrepo/readMetadata.py
index 6711d30..4f13662 100644
--- a/createrepo/readMetadata.py
+++ b/createrepo/readMetadata.py
@@ -42,7 +42,7 @@ class MetadataIndex(object):
            otherfile = os.path.join(self.outputdir, o)
        else:
            basefile = filelistfile = otherfile = ""
-
+
        self.files = {'base' : basefile,
                      'filelist' : filelistfile,
                      'other' : otherfile}
@@ -95,7 +95,7 @@ class MetadataIndex(object):
        mtime = None
        size = None
        relpath = None
-        do_stat = self.opts.get('do_stat', True)
+        do_stat = self.opts.get('do_stat', True)
        while node is not None:
            if node.type != "element":
                node = node.next
@@ -121,7 +121,7 @@ class MetadataIndex(object):
        if size is None:
            print _("size missing for %s") % relpath
            return
-        if do_stat:
+        if do_stat:
            filepath = os.path.join(self.opts['pkgdir'], relpath)
            try:
                st = os.stat(filepath)
@@ -205,9 +205,9 @@ class MetadataIndex(object):

if __name__ == "__main__":
    cwd = os.getcwd()
-    opts = {'verbose':1,
+    opts = {'verbose':1,
            'pkgdir': cwd}
-
+
    idx = MetadataIndex(cwd, opts)
    for fn in idx.basenodes.keys():
        a,b,c, = idx.getNodes(fn)
@@ -215,5 +215,3 @@ if __name__ == "__main__":
        b.serialize()
        c.serialize()
        idx.freeNodes(fn)
-
-
diff --git a/createrepo/utils.py b/createrepo/utils.py
index fb23964..3fa077f 100644
--- a/createrepo/utils.py
+++ b/createrepo/utils.py
@@ -53,21 +53,21 @@ class GzipFile(gzip.GzipFile):

def _gzipOpen(filename, mode="rb", compresslevel=9):
    return GzipFile(filename, mode, compresslevel)
-
+
def bzipFile(source, dest):
-
+
    s_fn = open(source, 'rb')
    destination = bz2.BZ2File(dest, 'w', compresslevel=9)

    while True:
        data = s_fn.read(1024000)
-
+
        if not data: break
        destination.write(data)

    destination.close()
    s_fn.close()
-
+
def returnFD(filename):
    try:
diff --git a/createrepo/yumbased.py b/createrepo/yumbased.py
index f04f4fb..acb5851 100644
--- a/createrepo/yumbased.py
+++ b/createrepo/yumbased.py
@@ -46,13 +46,13 @@ class CreateRepoPackage(YumLocalPackage):
        # not using the cachedir
        if not self._cachedir:
            self._checksum = misc.checksum(self.checksum_type, self.localpath)
-            self._checksums = [(self.checksum_type, self._checksum, 1)]
+            self._checksums = [(self.checksum_type, self._checksum, 1)]
            return self._checksum

        t = []
        if type(self.hdr[rpm.RPMTAG_SIGGPG]) is not types.NoneType:
-            t.append("".join(self.hdr[rpm.RPMTAG_SIGGPG]))
+            t.append("".join(self.hdr[rpm.RPMTAG_SIGGPG]))
        if type(self.hdr[rpm.RPMTAG_SIGPGP]) is not types.NoneType:
            t.append("".join(self.hdr[rpm.RPMTAG_SIGPGP]))
        if type(self.hdr[rpm.RPMTAG_HDRID]) is not types.NoneType:
@@ -61,7 +61,7 @@ class CreateRepoPackage(YumLocalPackage):
        kcsum = misc.Checksums(checksums=[self.checksum_type])
        kcsum.update("".join(t))
        key = kcsum.hexdigest()
-
+
        csumtag = '%s-%s-%s-%s' % (os.path.basename(self.localpath),
                                   key, self.size, self.filetime)
        csumfile = '%s/%s' % (self._cachedir, csumtag)
@@ -70,7 +70,7 @@ class CreateRepoPackage(YumLocalPackage):
            csumo = open(csumfile, 'r')
            checksum = csumo.readline()
            csumo.close()
-
+
        else:
            checksum = misc.checksum(self.checksum_type, self.localpath)
@@ -84,19 +84,19 @@ class CreateRepoPackage(YumLocalPackage):
                os.rename(tmpfilename, csumfile)
            except:
                pass
-
+
        self._checksum = checksum
        self._checksums = [(self.checksum_type, checksum, 1)]

        return self._checksum
-
+
    # sqlite-direct dump code below here :-/
    def _sqlite_null(self, item):
        if not item:
            return None
        return item
-
+
    def do_primary_sqlite_dump(self, cur):
        """insert primary data in place, this assumes the tables exist"""
        if self.crp_reldir and self.localpath.startswith(self.crp_reldir):
@@ -106,38 +106,38 @@ class CreateRepoPackage(YumLocalPackage):
            relpath = self.localpath

        p = (self.crp_packagenumber, self.checksum, self.name, self.arch,
-             self.version, self.epoch, self.release, self.summary.strip(),
-             self.description.strip(), self._sqlite_null(self.url), self.filetime,
-             self.buildtime, self._sqlite_null(self.license),
-             self._sqlite_null(self.vendor), self._sqlite_null(self.group),
-             self._sqlite_null(self.buildhost), self._sqlite_null(self.sourcerpm),
-             self.hdrstart, self.hdrend, self._sqlite_null(self.packager),
-             self.packagesize, self.size, self.archivesize, relpath,
+             self.version, self.epoch, self.release, self.summary.strip(),
+             self.description.strip(), self._sqlite_null(self.url), self.filetime,
+             self.buildtime, self._sqlite_null(self.license),
+             self._sqlite_null(self.vendor), self._sqlite_null(self.group),
+             self._sqlite_null(self.buildhost), self._sqlite_null(self.sourcerpm),
+             self.hdrstart, self.hdrend, self._sqlite_null(self.packager),
+             self.packagesize, self.size, self.archivesize, relpath,
             self.crp_baseurl, self.checksum_type)
-
+
        q = """insert into packages values (?, ?, ?, ?, ?, ?,
-               ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?, ?, ?,
+               ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?, ?, ?,
               ?, ?, ?)"""
-
-        # write out all of do_primary_sqlite as an executescript - work on the
+
+        # write out all of do_primary_sqlite as an executescript - work on the
        # quoting for pretty much any contingency - take from sqlutils.py
-        #
+        #
        # e
        #p = None
        #q = """insert into packages values (%s, %s, %s, %s, """
-
+
        cur.execute(q, p)

-        # provides, obsoletes, conflicts
+        # provides, obsoletes, conflicts
        for pco in ('obsoletes', 'provides', 'conflicts'):
            thispco = []
            for (name, flag, (epoch, ver, rel)) in getattr(self, pco):
                thispco.append((name, flag, epoch, ver, rel, self.crp_packagenumber))

-            q = "insert into %s values (?, ?, ?, ?, ?, ?)" % pco
+            q = "insert into %s values (?, ?, ?, ?, ?, ?)" % pco
            cur.executemany(q, thispco)

-        # requires
+        # requires
        reqs = []
        for (name, flag, (epoch, ver, rel), pre) in self._requires_with_pre():
            if name.startswith('rpmlib('):
@@ -146,18 +146,18 @@ class CreateRepoPackage(YumLocalPackage):
            if pre == 1:
                pre_bool = 'TRUE'
            reqs.append((name, flag, epoch, ver,rel, self.crp_packagenumber, pre_bool))
-        q = "insert into requires values (?, ?, ?, ?, ?, ?, ?)"
+        q = "insert into requires values (?, ?, ?, ?, ?, ?, ?)"
        cur.executemany(q, reqs)

        # files
        p = []
        for f in self._return_primary_files():
            p.append((f,))
-
+
        if p:
            q = "insert into files values (?, 'file', %s)" % self.crp_packagenumber
            cur.executemany(q, p)
-
+
        # dirs
        p = []
        for f in self._return_primary_dirs():
@@ -165,8 +165,8 @@ class CreateRepoPackage(YumLocalPackage):
        if p:
            q = "insert into files values (?, 'dir', %s)" % self.crp_packagenumber
            cur.executemany(q, p)
-
-
+
+
        # ghosts
        p = []
        for f in self._return_primary_files(list_of_files = self.returnFileEntries('ghost')):
@@ -174,17 +174,17 @@ class CreateRepoPackage(YumLocalPackage):
        if p:
            q = "insert into files values (?, 'ghost', %s)" % self.crp_packagenumber
            cur.executemany(q, p)
-
-
+
+
    def do_filelists_sqlite_dump(self, cur):
        """inserts filelists data in place, this assumes the tables exist"""
        # insert packagenumber + checksum into 'packages' table
        q = 'insert into packages values (?, ?)'
        p = (self.crp_packagenumber, self.checksum)
-
+
        cur.execute(q, p)
-
+
        # break up filelists and encode them
        dirs = {}
        for (filetype, files) in [('file', self.filelist), ('dir', self.dirlist),
@@ -199,27 +199,27 @@ class CreateRepoPackage(YumLocalPackage):
        # insert packagenumber|dir|files|types into files table
        p = []
        for (dirname,direc) in dirs.items():
-            p.append((self.crp_packagenumber, dirname,
+            p.append((self.crp_packagenumber, dirname,
                      utils.encodefilenamelist(direc['files']),
                      utils.encodefiletypelist(direc['types'])))
        if p:
            q = 'insert into filelist values (?, ?, ?, ?)'
            cur.executemany(q, p)
-
-
+
+
    def do_other_sqlite_dump(self, cur):
-        """inserts changelog data in place, this assumes the tables exist"""
+        """inserts changelog data in place, this assumes the tables exist"""
        # insert packagenumber + checksum into 'packages' table
        q = 'insert into packages values (?, ?)'
        p = (self.crp_packagenumber, self.checksum)
-
+
        cur.execute(q, p)

        if self.changelog:
            q = 'insert into changelog ("pkgKey", "date", "author", "changelog") values (%s, ?, ?, ?)' % self.crp_packagenumber
            cur.executemany(q, self.changelog)
-
+
    def do_sqlite_dump(self, md_sqlite):
        """write the metadata out to the sqlite dbs"""
        self.do_primary_sqlite_dump(md_sqlite.primary_cursor)
@@ -228,8 +228,3 @@ class CreateRepoPackage(YumLocalPackage):
        md_sqlite.file_cx.commit()
        self.do_other_sqlite_dump(md_sqlite.other_cursor)
        md_sqlite.other_cx.commit()
-
-
-
-
-
diff --git a/dmd.py b/dmd.py
index fdce45c..684bac6 100755
--- a/dmd.py
+++ b/dmd.py
@@ -89,7 +89,7 @@ class MdType(object):

        additions = deltatree.find(self.deltasns + 'additions').getchildren()
        removals = deltatree.find(self.deltasns + 'removals').getchildren()
-
+
        for pkg in additions:
            pkgid = self.get_pkg_id(pkg)
            if oldpkgshash.has_key(pkgid):
@@ -106,7 +106,7 @@ class MdType(object):

        oldcount = int(oldroot.get('packages'))
        newcount = oldcount + len(additions) - len(removals)
-        oldroot.set('packages', str(newcount))
+        oldroot.set('packages', str(newcount))

        print tostring(oldtree, pretty_print=True)
diff --git a/genpkgmetadata.py b/genpkgmetadata.py
index 7877ba9..4ab4e9a 100755
--- a/genpkgmetadata.py
+++ b/genpkgmetadata.py
@@ -16,7 +16,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Copyright 2004 Duke University
-# Portions Copyright 2009 Red Hat, Inc -
+# Portions Copyright 2009 Red Hat, Inc -
# written by seth vidal skvidal at fedoraproject.org

import os
@@ -36,7 +36,7 @@ def parse_args(args, conf):
        Parse the command line args. return a config object.
        Sanity check all the things being passed in.
""" - + _def = yum.misc._default_checksums[0] _avail = yum.misc._available_checksums parser = OptionParser(version = "createrepo %s" % createrepo.__version__) @@ -67,21 +67,21 @@ def parse_args(args, conf): parser.add_option("-d", "--database", default=False, action="store_true", help="create sqlite database files") # temporarily disabled - #parser.add_option("--database-only", default=False, action="store_true", + #parser.add_option("--database-only", default=False, action="store_true", # dest='database_only', # help="Only make the sqlite databases - does not work with --update, yet") parser.add_option("--update", default=False, action="store_true", help="use the existing repodata to speed up creation of new") parser.add_option("--update-md-path", default=None, dest='update_md_path', help="use the existing repodata for --update from this path") - parser.add_option("--skip-stat", dest='skip_stat', default=False, + parser.add_option("--skip-stat", dest='skip_stat', default=False, help="skip the stat() call on a --update, assumes if the file" \ "name is the same then the file is still the same " \ "(only use this if you're fairly trusting or gullible)", action="store_true") parser.add_option("--split", default=False, action="store_true", help="generate split media") - parser.add_option("-i", "--pkglist", default=None, + parser.add_option("-i", "--pkglist", default=None, help="use only the files listed in this file from the " \ "directory specified") parser.add_option("-n", "--includepkg", default=[], action="append", @@ -100,35 +100,35 @@ def parse_args(args, conf): default=False, action="store_true") parser.add_option("--distro", default=[], action="append", help="distro tag and optional cpeid: --distro" "'cpeid,textname'") - parser.add_option("--content", default=[], dest='content_tags', + parser.add_option("--content", default=[], dest='content_tags', action="append", help="tags for the content in the repository") parser.add_option("--revision", default=None, help="user-specified revision for this repository") parser.add_option("--deltas", default=False, action="store_true", help="create delta rpms and metadata") - parser.add_option("--oldpackagedirs", default=[], dest="oldpackage_paths", + parser.add_option("--oldpackagedirs", default=[], dest="oldpackage_paths", action="append", help="paths to look for older pkgs to delta against") parser.add_option("--num-deltas", default=1, dest='num_deltas', type='int', help="the number of older versions to make deltas against") parser.add_option("--read-pkgs-list", default=None, dest='read_pkgs_list', help="output the paths to the pkgs actually read useful with --update") - parser.add_option("--max-delta-rpm-size", default=100000000, - dest='max_delta_rpm_size', type='int', + parser.add_option("--max-delta-rpm-size", default=100000000, + dest='max_delta_rpm_size', type='int', help="max size of an rpm that to run deltarpm against (in bytes)") (opts, argsleft) = parser.parse_args(args) if len(argsleft) > 1 and not opts.split: errorprint(_('Error: Only one directory allowed per run.')) parser.print_usage() sys.exit(1) - + elif len(argsleft) == 0: errorprint(_('Error: Must specify a directory to index.')) parser.print_usage() sys.exit(1) - + else: directories = argsleft - + if opts.sumtype == 'sha1': errorprint(_('Warning: It is more compatible to use sha instead of sha1')) @@ -143,7 +143,7 @@ def parse_args(args, conf): if opts.simple_md_filenames: opts.unique_md_filenames = False - + # let's switch over to using the conf object - put all the opts into 
    for opt in parser.option_list:
        if opt.dest is None: # this is fairly silly
@@ -152,7 +152,7 @@
        if getattr(opts, opt.dest) is None:
            continue
        setattr(conf, opt.dest, getattr(opts, opt.dest))
-
+
    directory = directories[0]
    conf.directory = directory
    conf.directories = directories
@@ -175,15 +175,15 @@
                continue
            lst.append(line)
        pfo.close()
-
+
        conf.pkglist = lst

    if conf.includepkg:
        conf.pkglist.extend(conf.includepkg)
-
+
    if conf.changelog_limit: # make sure it is an int, not a string
        conf.changelog_limit = int(conf.changelog_limit)
-
+
    return conf

class MDCallBack(object):
@@ -191,18 +191,18 @@ class MDCallBack(object):
    def errorlog(self, thing):
        """error log output"""
        print >> sys.stderr, thing
-
+
    def log(self, thing):
        """log output"""
        print thing
-
+
    def progress(self, item, current, total):
        """progress bar"""
        beg = "%*d/%d - " % (len(str(total)), current, total)
        left = 80 - len(beg)
        sys.stdout.write("\r%s%-*.*s" % (beg, left, left, item))
        sys.stdout.flush()
-
+
def main(args):
    """createrepo from cli main flow"""
    start_st = time.time()
@@ -211,13 +211,13 @@ def main(args):
    if conf.profile:
        print ('start time: %0.3f' % (time.time() - start_st))

-    mid_st = time.time()
+    mid_st = time.time()
    try:
        if conf.split:
-            mdgen = createrepo.SplitMetaDataGenerator(config_obj=conf,
+            mdgen = createrepo.SplitMetaDataGenerator(config_obj=conf,
                                                      callback=MDCallBack())
        else:
-            mdgen = createrepo.MetaDataGenerator(config_obj=conf,
+            mdgen = createrepo.MetaDataGenerator(config_obj=conf,
                                                 callback=MDCallBack())
        if mdgen.checkTimeStamps():
            if mdgen.conf.verbose:
@@ -226,7 +226,7 @@ def main(args):

        if conf.profile:
            print ('mid time: %0.3f' % (time.time() - mid_st))
-
+
        pm_st = time.time()
        mdgen.doPkgMetadata()
        if conf.profile:
@@ -239,8 +239,8 @@ def main(args):
        mdgen.doFinalMove()
        if conf.profile:
            print ('fm time: %0.3f' % (time.time() - fm_st))
-
-
+
+
    except MDError, errormsg:
        errorprint(_('%s') % errormsg)
        sys.exit(1)
diff --git a/mergerepo.py b/mergerepo.py
index 6b0a7ff..80ab504 100755
--- a/mergerepo.py
+++ b/mergerepo.py
@@ -30,9 +30,9 @@ def parse_args(args):
    """Parse our opts/args"""
    usage = """
    mergerepo: take 2 or more repositories and merge their metadata into a new repo
-
+
    mergerepo --repo=url --repo=url --outputdir=/some/path"""
-
+
    parser = OptionParser(version = "mergerepo 0.1", usage=usage)
    # query options
    parser.add_option("-r", "--repo", dest='repos', default=[], action="append",
@@ -40,7 +40,7 @@
    parser.add_option("-a", "--archlist", default=[], action="append",
                      help="Defaults to all arches - otherwise specify arches")
    parser.add_option("-d", "--database", default=False, action="store_true")
-    parser.add_option("-o", "--outputdir", default=None,
+    parser.add_option("-o", "--outputdir", default=None,
                      help="Location to create the repository")
    parser.add_option("", "--nogroups", default=False, action="store_true",
                      help="Do not merge group(comps) metadata")
@@ -52,16 +52,16 @@
        parser.print_usage()
        sys.exit(1)

-    # sort out the comma-separated crap we somehow inherited.
+    # sort out the comma-separated crap we somehow inherited.
    archlist = []
    for archs in opts.archlist:
        for arch in archs.split(','):
            archlist.append(arch)

    opts.archlist = archlist
-
+
    return opts
-
+
def main(args):
    """main"""
    opts = parse_args(args)
diff --git a/modifyrepo.py b/modifyrepo.py
index bbf938d..1fd9ea4 100755
--- a/modifyrepo.py
+++ b/modifyrepo.py
@@ -93,7 +93,7 @@ class RepoMetadata:

        csum, destmd = checksum_and_rename(destmd, self.checksum_type)
        base_destmd = os.path.basename(destmd)
-
+
        ## Remove any stale metadata
        for elem in self.doc.getElementsByTagName('data'):
@@ -109,21 +109,21 @@ class RepoMetadata:
        self._insert_element(data, 'location',
                             attrs={ 'href' : 'repodata/' + base_destmd })
        data.appendChild(self.doc.createTextNode("\n    "))
-        self._insert_element(data, 'checksum',
-                             attrs={ 'type' : self.checksum_type },
+        self._insert_element(data, 'checksum',
+                             attrs={ 'type' : self.checksum_type },
                             text=csum)
        data.appendChild(self.doc.createTextNode("\n    "))
        self._insert_element(data, 'timestamp',
                             text=str(os.stat(destmd).st_mtime))
        data.appendChild(self.doc.createTextNode("\n    "))
-        self._insert_element(data, 'open-checksum',
+        self._insert_element(data, 'open-checksum',
                             attrs={ 'type' : self.checksum_type },
                             text=open_csum)
        data.appendChild(self.doc.createTextNode("\n  "))
        root.appendChild(self.doc.createTextNode("\n"))

-        print " type =", mdtype
+        print " type =", mdtype
        print " location =", 'repodata/' + mdname
        print " checksum =", csum
        print " timestamp =", str(os.stat(destmd).st_mtime)
@@ -151,4 +151,3 @@ if __name__ == '__main__':
    except MDError, e:
        print "Could not add metadata from file %s: %s" % (sys.argv[1], str(e))
        sys.exit(1)
-
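
The change above is purely mechanical: every hunk deletes trailing spaces and tabs and leaves the code otherwise untouched. For reference, a cleanup of this kind can be reproduced with a short script; the helper below is an illustrative sketch (not part of this commit) that assumes a Python-only tree such as createrepo's:

    import os

    def trim_trailing_whitespace(root='.'):
        # Rewrite each .py file with trailing blanks/tabs stripped from
        # every line; files that are already clean are left untouched.
        # Note: this also appends a newline to a final line that lacks one.
        for dirpath, _dirs, files in os.walk(root):
            for name in files:
                if not name.endswith('.py'):
                    continue
                path = os.path.join(dirpath, name)
                fo = open(path)
                lines = fo.readlines()
                fo.close()
                cleaned = [line.rstrip() + '\n' for line in lines]
                if cleaned != lines:
                    fo = open(path, 'w')
                    fo.writelines(cleaned)
                    fo.close()

Going forward, `git diff --check` flags trailing whitespace in pending changes, which avoids the need for sweep commits like this one.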