Refactored deltarepo
author Tomas Mlcoch <tmlcoch@redhat.com>
Mon, 2 Dec 2013 16:56:58 +0000 (17:56 +0100)
committer Tomas Mlcoch <tmlcoch@redhat.com>
Mon, 2 Dec 2013 16:57:24 +0000 (17:57 +0100)
deltarepo/deltarepo/__init__.py
deltarepo/deltarepo/applicator.py
deltarepo/deltarepo/common.py
deltarepo/deltarepo/delta_plugins.py
deltarepo/deltarepo/generator.py

index fad0c58..5c19a70 100644 (file)
@@ -9,7 +9,7 @@ Copyright (C) 2013   Tomas Mlcoch
 """
 
 import createrepo_c as cr
-from deltarepo.common import LoggingInterface, Metadata, RemovedXml
+from deltarepo.common import LoggingInterface, Metadata
 from deltarepo.applicator import DeltaRepoApplicator
 from deltarepo.generator import DeltaRepoGenerator
 from deltarepo.delta_plugins import PLUGINS
index df86827..80391f8 100644 (file)
@@ -11,8 +11,8 @@ Copyright (C) 2013   Tomas Mlcoch
 import os
 import shutil
 import createrepo_c as cr
-from deltarepo.common import LoggingInterface, Metadata, RemovedXml
-from deltarepo.delta_plugins import PLUGINS, GENERAL_PLUGIN
+from deltarepo.common import LoggingInterface, Metadata, DeltaMetadata, PluginBundle
+from deltarepo.delta_plugins import GlobalBundle, PLUGINS, GENERAL_PLUGIN
 from deltarepo.errors import DeltaRepoError, DeltaRepoPluginError
 
 __all__ = ['DeltaRepoApplicator']
@@ -32,7 +32,7 @@ class DeltaRepoApplicator(LoggingInterface):
         self.repoid_type = None
         self.unique_md_filenames = False
         self.databases = False
-        self.removedxmlobj = RemovedXml()
+        self.deltametadata = DeltaMetadata()
 
         self.out_path = out_path or "./"
 
@@ -115,19 +115,18 @@ class DeltaRepoApplicator(LoggingInterface):
 
         # Load removedxml
         self.removedxml_path = None
-        if "removed" in self.delta_records:
-            self.removedxml_path = os.path.join(self.delta_repo_path,
-                                   self.delta_records["removed"].location_href)
-            self.removedxmlobj.xml_parse(self.removedxml_path)
+        if "deltametadata" in self.delta_records:
+            self.deltametadata_path = os.path.join(self.delta_repo_path,
+                                   self.delta_records["deltametadata"].location_href)
+            self.deltametadata.xmlparse(self.deltametadata_path)
         else:
-            self._warning("\"removed\" record is missing in repomd.xml "\
+            self._warning("\"deltametadata\" record is missing in repomd.xml "\
                           "of delta repo")
 
-        # Prepare bundle
-        self.bundle = {}
-        self.bundle["repoid_type_str"] = self.repoid_type_str
-        self.bundle["removed_obj"] = self.removedxmlobj
-        self.bundle["unique_md_filenames"] = self.unique_md_filenames
+        # Prepare global bundle
+        self.globalbundle = GlobalBundle()
+        self.globalbundle.repoid_type_str = self.repoid_type_str
+        self.globalbundle.unique_md_filenames = self.unique_md_filenames
 
     def _new_metadata(self, metadata_type):
         """Return Metadata Object for the metadata_type or None"""
@@ -153,12 +152,11 @@ class DeltaRepoApplicator(LoggingInterface):
             if not os.path.isfile(metadata.old_fn):
                 self._warning("File {0} doesn't exist in target repository" \
                               " (but it should)!".format(metadata.old_fn))
-                self._warning("Metadata of the type \"{0}\" won't be " \
+                self._warning("Metadata of the type \"{0}\" maybe won't be " \
                               "available in the generated repository".format(metadata_type))
-                self._warning("Output repository WON'T be a 1:1 identical " \
+                self._warning("Output repository maybe WON'T be a 1:1 identical " \
                               "to the target repository!")
                 metadata.old_fn = None
-                return None
 
         # Set output directory
         metadata.out_dir = self.new_repodata_path
@@ -180,93 +178,51 @@ class DeltaRepoApplicator(LoggingInterface):
         # Prepare output path
         os.mkdir(self.new_repodata_path)
 
+        # Set of types of processed metadata records ("primary", "primary_db"...)
         processed_metadata = set()
-        used_plugins = set()
-        plugin_used = True
-
-        while plugin_used:
-            # Iterate on plugins until any of them was used
-            plugin_used = False
-
-            for plugin in PLUGINS:
-
-                # Use only plugins that haven't been already used
-                if plugin in used_plugins:
-                    continue
-
-                # Check which metadata this plugin want to process
-                conflicting_metadata = set(plugin.METADATA) & processed_metadata
-                if conflicting_metadata:
-                    message = "Plugin {0}: Error - Plugin want to process " \
-                              "already processed metadata {1}".format(
-                               plugin.NAME, conflicting_metadata)
-                    self._error(message)
-                    raise DeltaRepoError(message)
-
-                # Prepare metadata for the plugin
-                metadata_objects = {}
-                for metadata_name in plugin.METADATA:
-                    metadata_object = self._new_metadata(metadata_name)
-                    if metadata_object is not None:
-                        metadata_objects[metadata_name] = metadata_object
-
-                # Skip plugin if no supported metadata available
-                if not metadata_objects:
-                    self._debug("Plugin {0}: Skipped - None of supported " \
-                                "metadata {1} available".format(
-                                plugin.NAME, plugin.METADATA))
-                    used_plugins.add(plugin)
-                    continue
-
-                # Check if bundle contains all what plugin need
-                required_bundle_keys = set(plugin.APPLY_REQUIRED_BUNDLE_KEYS)
-                bundle_keys = set(self.bundle.keys())
-                if not required_bundle_keys.issubset(bundle_keys):
-                    self._debug("Plugin {0}: Skipped - Bundle keys {1} "\
-                                "are not available".format(plugin.NAME,
-                                (required_bundle_keys - bundle_keys)))
-                    continue
-
-                # Use the plugin
-                self._debug("Plugin {0}: Active".format(plugin.NAME))
-                plugin_instance = plugin()
-                plugin_instance.apply(metadata_objects, self.bundle)
-
-                # Check what bundle keys was added by the plugin
-                new_bundle_keys = set(self.bundle.keys())
-                diff = new_bundle_keys - bundle_keys
-                if diff != set(plugin.APPLY_BUNDLE_CONTRIBUTION):
-                    message = "Plugin {0}: Error - Plugin should add: {1} " \
-                               "bundle items but add: {2}".format(
-                               plugin.NAME, plugin.APPLY_BUNDLE_CONTRIBUTION,
-                               list(diff))
-                    self._error(message)
-                    raise DeltaRepoError(message)
-
-                # Put repomd records from processed metadatas to repomd
-                for md in metadata_objects.values():
-                    self._debug("Plugin {0}: Processed \"{1}\" delta record "\
-                                "which produced:".format(
-                                plugin.NAME, md.metadata_type))
-                    for repomd_record in md.generated_repomd_records:
-                        self._debug(" - {0}".format(repomd_record.type))
-                        self.new_repomd.set_record(repomd_record)
-
-                # Organization stuff
-                for md in metadata_objects.keys():
-                    if md in self.bundle["no_processed"]:
-                        self._debug("Plugin {0}: Skip processing of \"{1}\" delta record".format(
-                                    plugin.NAME, md))
-                        continue
-                    processed_metadata.add(md)
-
-                used_plugins.add(plugin)
-                plugin_used = True
+
+        for plugin in PLUGINS:
+
+            # Prepare metadata for the plugin
+            metadata_objects = {}
+            for metadata_name in plugin.METADATA:
+                metadata_object = self._new_metadata(metadata_name)
+                if metadata_object is not None:
+                    metadata_objects[metadata_name] = metadata_object
+
+            # Skip plugin if no supported metadata available
+            if not metadata_objects:
+                self._debug("Plugin {0}: Skipped - None of supported " \
+                            "metadata {1} available".format(
+                            plugin.NAME, plugin.METADATA))
+                continue
+
+            # Prepare plugin bundle
+            # TODO: Check plugin version
+            pluginbundle = self.deltametadata.get_pluginbundle(plugin.NAME)
+
+            # Use the plugin
+            self._debug("Plugin {0}: Active".format(plugin.NAME))
+            plugin_instance = plugin(pluginbundle, self.globalbundle)
+            plugin_instance.apply(metadata_objects)
+
+            # Put repomd records from processed metadatas to repomd
+            for md in metadata_objects.values():
+                self._debug("Plugin {0}: Processed \"{1}\" delta record "\
+                            "which produced:".format(
+                            plugin.NAME, md.metadata_type))
+                for repomd_record in md.generated_repomd_records:
+                    self._debug(" - {0}".format(repomd_record.type))
+                    self.new_repomd.set_record(repomd_record)
+
+            # Organization stuff
+            for md in metadata_objects.keys():
+                processed_metadata.add(md)
 
         # Process rest of the metadata files
         metadata_objects = {}
         for rectype, rec in self.delta_records.items():
-            if rectype == "removed":
+            if rectype == "deltametadata":
                 continue
             if rectype not in processed_metadata:
                 metadata_object = self._new_metadata(rectype)
@@ -280,8 +236,8 @@ class DeltaRepoApplicator(LoggingInterface):
         if metadata_objects:
             # Use the plugin
             self._debug("Plugin {0}: Active".format(GENERAL_PLUGIN.NAME))
-            plugin_instance = GENERAL_PLUGIN()
-            plugin_instance.apply(metadata_objects, self.bundle)
+            plugin_instance = GENERAL_PLUGIN(None, self.globalbundle)
+            plugin_instance.apply(metadata_objects)
 
             for md in metadata_objects.values():
                 self._debug("Plugin {0}: Processed \"{1}\" delta record "\
@@ -291,38 +247,36 @@ class DeltaRepoApplicator(LoggingInterface):
                     self._debug(" - {0}".format(repomd_record.type))
                     self.new_repomd.set_record(repomd_record)
 
-        self._debug("Used plugins: {0}".format([p.NAME for p in used_plugins]))
-
         # Check if calculated repoids match
         self._debug("Checking expected repoids")
 
-        if "old_repoid" in self.bundle:
-            if self.old_id != self.bundle["old_repoid"]:
+        if self.globalbundle.calculated_old_repoid:
+            if self.old_id != self.globalbundle.calculated_old_repoid:
                 message = "Repoid of the \"{0}\" repository doesn't match "\
                           "the real repoid ({1} != {2}).".format(
                            self.old_repo_path, self.old_id,
-                           self.bundle["old_repoid"])
+                           self.globalbundle.calculated_old_repoid)
                 self._error(message)
                 raise DeltaRepoError(message)
             else:
                 self._debug("Repoid of the old repo matches ({0})".format(
                             self.old_id))
         else:
-            self._warning("\"old_repoid\" item is missing in bundle.")
+            self._warning("\"old_repoid\" item was not calculated.")
 
-        if "new_repoid" in self.bundle:
-            if self.new_id != self.bundle["new_repoid"]:
+        if self.globalbundle.calculated_new_repoid:
+            if self.new_id != self.globalbundle.calculated_new_repoid:
                 message = "Repoid of the \"{0}\" repository doesn't match "\
                           "the real repoid ({1} != {2}).".format(
                            self.new_repo_path, self.new_id,
-                           self.bundle["new_repoid"])
+                           self.globalbundle.calculated_new_repoid)
                 self._error(message)
                 raise DeltaRepoError(message)
             else:
                 self._debug("Repoid of the new repo matches ({0})".format(
                             self.new_id))
         else:
-            self._warning("\"new_repoid\" item is missing in bundle.")
+            self._warning("\"new_repoid\" item was not calculated.")
 
         # Prepare and write out the new repomd.xml
         self._debug("Preparing repomd.xml ...")
index 2890e36..731f783 100644 (file)
@@ -4,6 +4,7 @@ import logging
 import xml.dom.minidom
 import createrepo_c as cr
 from lxml import etree
+from deltarepo.errors import DeltaRepoError
 
 class LoggingInterface(object):
     def __init__(self, logger=None):
@@ -27,90 +28,159 @@ class LoggingInterface(object):
     def _critical(self, msg):
         self.logger.critical(msg)
 
-class RemovedXml(object):
+class AdditionalXmlData(object):
+    """
+    Interface to store/load additional data to/from xml.
+    """
+
+    ADDITIONAL_XML_DATA = True
+
     def __init__(self):
-        self.database = {}  # e.g.: {"primary": "1", "filelists": "1", "other": "1"}
-        self.packages = {}  # { location_href: location_base }
-        self.files = {}     # { location_href: location_base or Null }
-
-    def __str__(self):
-        print self.database_gen
-        print self.packages
-        print self.files
-
-    def set_database(self, type, val):
-        self.database[type] = "1" if val else "0"
-
-    def get_database(self, type, default=False):
-        if type in self.database:
-            return self.database[type] != "0"
-        return default
-
-    def add_pkg(self, pkg):
-        self.packages[pkg.location_href] = pkg.location_base
-
-    def add_pkg_locations(self, location_href, location_base):
-        self.packages[location_href] = location_base
-
-    def add_record(self, rec):
-        self.files[rec.location_href] = rec.location_base
-
-    def xml_dump(self):
-        xmltree = etree.Element("removed")
-        database = etree.SubElement(xmltree, "database", self.database)
-        packages = etree.SubElement(xmltree, "packages")
-        for href, base in self.packages.iteritems():
-            attrs = {}
-            if href: attrs['href'] = href
-            if base: attrs['base'] = base
-            if not attrs: continue
-            etree.SubElement(packages, "location", attrs)
-        files = etree.SubElement(xmltree, "files")
-        for href, base in self.files.iteritems():
-            attrs = {}
-            if href: attrs['href'] = href
-            if base: attrs['base'] = base
-            if not attrs: continue
-            etree.SubElement(files, "location", attrs)
+        self._data = {}
+        self._lists = {}
+
+    def set(self, key, value):
+        if not isinstance(key, basestring):
+            raise TypeError("expected string as key")
+        if not isinstance(value, basestring):
+            raise TypeError("expected string as value")
+        self._data[key] = value
+
+    def update(self, dictionary):
+        if not isinstance(dictionary, dict):
+            raise TypeError("expected dictionary")
+
+        for key, val in dictionary.items():
+            self.set(key, val)
+
+    def append(self, listname, dictionary):
+        if not isinstance(listname, basestring):
+            raise TypeError("expected string")
+        if not isinstance(dictionary, dict):
+            raise TypeError("expected dict")
+
+        if not listname in self._lists:
+            self._lists[listname] = []
+
+        # Validate items first
+        for key, val in dictionary.items():
+            if not isinstance(key, basestring) or not isinstance(val, basestring):
+                raise TypeError("Dict's keys and values must be string")
+
+        self._lists[listname].append(dictionary)
+
+    def get(self, key, default=None):
+        return self._data.get(key, default)
+
+    def get_list(self, key, default=None):
+        return self._lists.get(key, default)
+
+    def subelement(self, parent, name, in_attrs=None):
+        attrs = {}
+        attrs.update(self._data)
+        if in_attrs:
+            attrs.update(in_attrs)
+        elem = etree.SubElement(parent, name, attrs)
+
+        for listname, listvalues in self._lists.items():
+            for val in listvalues:
+                etree.SubElement(elem, listname, val)
+
+        return elem
+
+class PluginBundle(AdditionalXmlData):
+    def __init__(self, name, version):
+        AdditionalXmlData.__init__(self)
+
+        if not isinstance(name, basestring):
+            raise TypeError("string expected")
+        if not isinstance(version, int):
+            raise TypeError("integer expected")
+
+        self.name = name
+        self.version = version
+
+class DeltaMetadata(AdditionalXmlData):
+    """Object that represents bundle.xml file in deltarepository.
+    """
+
+    def __init__(self):
+        AdditionalXmlData.__init__(self)
+
+        self.usedplugins = {}
+
+    def add_pluginbundle(self, pluginbundle):
+        self.usedplugins[pluginbundle.name] = pluginbundle
+
+    def get_pluginbundle(self, name):
+        return self.usedplugins.get(name, None)
+
+
+    def xmldump(self):
+        xmltree = etree.Element("deltametadata")
+
+        usedplugins = etree.SubElement(xmltree, "usedplugins")
+        for plugin in self.usedplugins.values():
+            attrs = {"name": plugin.name, "version": str(plugin.version)}
+            plugin.subelement(usedplugins, "plugin", attrs)
         return etree.tostring(xmltree,
                               pretty_print=True,
                               encoding="UTF-8",
                               xml_declaration=True)
 
-    def xml_parse(self, path):
-
+    def xmlparse(self, path):
         _, tmp_path = tempfile.mkstemp()
         cr.decompress_file(path, tmp_path, cr.AUTO_DETECT_COMPRESSION)
         dom = xml.dom.minidom.parse(tmp_path)
         os.remove(tmp_path)
 
-        database = dom.getElementsByTagName("database")
-        if database and database[0]:
-            for x in xrange(database[0].attributes.length):
-                attr = database[0].attributes.item(x)
-                self.database[attr.name] = attr.value
-
-        packages = dom.getElementsByTagName("packages")
-        if packages and packages[0]:
-            for loc in packages[0].getElementsByTagName("location"):
-                href = loc.getAttribute("href")
-                base = loc.getAttribute("base")
-                if not href:
-                    continue
-                if not base:
-                    base = None
-                self.packages[href] = base
-
-        files = dom.getElementsByTagName("files")
-        if files and files[0]:
-            for loc in files[0].getElementsByTagName("location"):
-                href = loc.getAttribute("href")
-                base = loc.getAttribute("base")
-                if not href:
-                    continue
-                if not base:
-                    base = None
-                self.files[href] = base
+        deltametadata = dom.getElementsByTagName("deltametadata")
+        if not deltametadata or not deltametadata[0]:
+            raise DeltaRepoError("Cannot parse {0}".format(path))
+
+        usedplugins = dom.getElementsByTagName("plugin")
+        for plugin_node in usedplugins:
+            name = None
+            version = None
+            other = {}
+
+            # Parse attributes
+            for x in xrange(plugin_node.attributes.length):
+                attr = plugin_node.attributes.item(x)
+                if attr.name == "name":
+                    name = attr.value
+                elif attr.name == "version":
+                    version = attr.value
+                else:
+                    other[attr.name] = attr.value
+
+            if not name or not version:
+                raise DeltaRepoError("Bad XML: name or version attribute "
+                                     "of plugin element is missing")
+
+            try:
+                version_int = int(version)
+            except ValueError:
+                raise DeltaRepoError("Version {0} cannot be converted to "
+                                     "integer number".format(version))
+
+            bp = PluginBundle(name, version_int)
+            bp.update(other)
+
+            # Parse subelements
+            for list_item_node in plugin_node.childNodes:
+                if list_item_node.nodeType != xml.dom.minidom.Node.ELEMENT_NODE:
+                    continue
+
+                dictionary = {}
+                listname = list_item_node.nodeName
+                for x in xrange(list_item_node.attributes.length):
+                    attr = list_item_node.attributes.item(x)
+                    dictionary[attr.name] = attr.value
+
+                bp.append(listname, dictionary)
+
+            self.usedplugins[bp.name] = bp
 
 class Metadata(object):
     """Metadata file"""
index bc31e3e..9f90fbe 100644 (file)
@@ -8,6 +8,21 @@ from deltarepo.errors import DeltaRepoError, DeltaRepoPluginError
 PLUGINS = []
 GENERAL_PLUGIN = None
 
+class GlobalBundle(object):
+
+    __slots__ = ("repoid_type_str",
+                 "unique_md_filenames",
+                 "calculated_old_repoid",
+                 "calculated_new_repoid")
+
+    def __init__(self):
+        self.repoid_type_str = "sha256"
+        self.unique_md_filenames = True
+
+        # Filled by plugins
+        self.calculated_old_repoid = None
+        self.calculated_new_repoid = None
+
 class DeltaRepoPlugin(object):
 
     # Plugin name
@@ -21,53 +36,64 @@ class DeltaRepoPlugin(object):
     # to apply deltas on them!
     MATADATA = []
 
-    # Its highly recomended for plugin to be maximal independend on
-    # other plugins and metadata not specified in METADATA.
-    # But some kind of dependency mechanism can be implemented via
-    # *_REQUIRED_BUNDLE_KEYS and *_BUDLE_CONTRIBUTION.
-
-    # List of bundle keys that have to be filled before
-    # apply() method of this plugin should be called
-    APPLY_REQUIRED_BUNDLE_KEYS = []
-
-    # List of bundle key this pulugin adds during apply() method call
-    APPLY_BUNDLE_CONTRIBUTION = []
-
-    # List of bundle keys that have to be filled before
-    # gen() method of this plugin should be called
-    GEN_REQUIRED_BUNDLE_KEYS = []
-
-    # List of bundle key this pulugin adds during gen() method call
-    GEN_BUNDLE_CONTRIBUTION = []
-
-    # If two plugins want to add the same key to the bundle
-    # then exception is raised.
-    # If plugin requires a key that isn't provided by any of registered
-    # plugins then exception is raised.
-    # If plugin adds to bundle a key that is not listed in BUNDLE_CONTRIBUTION,
-    # then exception is raised.
-
-    def __init__(self):
-        pass
-
-    def apply(self, metadata, bundle):
+    def __init__(self, pluginbundle, globalbundle):
+
+        # PluginBundle object.
+        # This object store data in persistent way to the generated delta repodata.
+        # This object is empty when gen() plugin method is called and plugin
+        # should use it to store necessary information.
+        # During apply() this object should be filled with data
+        # previously stored during gen() method
+        self.pluginbundle = pluginbundle
+
+        # GlobalBundle object carrying data shared between plugins
+        self.globalbundle = globalbundle
+
+    def gen_use_original(self, md):
+        """Function that takes original metadata file, and use it as a delta
+        Plugins could use this function when they cannot generate delta file
+        for some reason (eg. file is newly added, so delta is
+        meaningless/impossible)."""
+        md.delta_fn = os.path.join(md.out_dir, os.path.basename(md.new_fn))
+        shutil.copy2(md.new_fn, md.delta_fn)
+
+        # Prepare repomd record of xml file
+        rec = cr.RepomdRecord(md.metadata_type, md.delta_fn)
+        rec.fill(md.checksum_type)
+        if self.globalbundle.unique_md_filenames:
+            rec.rename_file()
+
+        md.generated_repomd_records.append(rec)
+
+    def apply_use_original(self, md):
+        """Reversal function for the gen_use_original"""
+        md.new_fn = os.path.join(md.out_dir, os.path.basename(md.delta_fn))
+        shutil.copy2(md.delta_fn, md.new_fn)
+
+        # Prepare repomd record of xml file
+        rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
+        rec.fill(md.checksum_type)
+        if self.globalbundle.unique_md_filenames:
+            rec.rename_file()
+
+        md.generated_repomd_records.append(rec)
+
+    def apply(self, metadata):
         """
         :arg metadata: Dict with available metadata listed in METADATA.
             key is metadata type (e.g. "primary", "filelists", ...)
             value is Metadata object
             This method is called only if at least one metadata listed
             in METADATA are found in delta repomd.
-        :arg bundle: Dict with various metadata.
 
         Apply method has to do:
          * Raise DeltaRepoPluginError if something is bad
-         * Build a new filename for each metadata and store it
-           to Metadata Object.
-         * 
+         * Build a new filename and create a file for each metadata and
+           store it to Metadata Object.
         """
         raise NotImplementedError("Not implemented")
 
-    def gen(self, metadata, bundle):
+    def gen(self, metadata):
         raise NotImplementedError("Not implemented")
 
 
@@ -76,55 +102,19 @@ class GeneralDeltaRepoPlugin(DeltaRepoPlugin):
     NAME = "GeneralDeltaPlugin"
     VERSION = 1
     METADATA = []
-    APPLY_REQUIRED_BUNDLE_KEYS = ["removed_obj",
-                                  "unique_md_filenames"]
-    APPLY_BUNDLE_CONTRIBUTION = []
-    GEN_REQUIRED_BUNDLE_KEYS = ["removed_obj",
-                                "unique_md_filenames"]
-    GEN_BUNDLE_CONTRIBUTION = []
 
     def _path(self, path, record):
         """Return path to the repodata file."""
         return os.path.join(path, record.location_href)
 
-    def apply(self, metadata, bundle):
-
-        # Get info from bundle
-        removed_obj = bundle["removed_obj"]
-        unique_md_filenames = bundle["unique_md_filenames"]
-
-        #
+    def apply(self, metadata):
         for md in metadata.values():
-            md.new_fn = os.path.join(md.out_dir, os.path.basename(md.delta_fn))
-            shutil.copy2(md.delta_fn, md.new_fn)
-
-            # Prepare repomd record of xml file
-            rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
-            rec.fill(md.checksum_type)
-            if unique_md_filenames:
-                rec.rename_file()
-
-            md.generated_repomd_records.append(rec)
-
-    def gen(self, metadata, bundle):
+            self.apply_use_original(md)
 
+    def gen(self, metadata):
         ## TODO: Compress uncompressed data
-
-        # Get info from bundle
-        removed_obj = bundle["removed_obj"]
-        unique_md_filenames = bundle["unique_md_filenames"]
-
         for md in metadata.values():
-            md.delta_fn = os.path.join(md.out_dir, os.path.basename(md.new_fn))
-            shutil.copy2(md.new_fn, md.delta_fn)
-
-            # Prepare repomd record of xml file
-            rec = cr.RepomdRecord(md.metadata_type, md.delta_fn)
-            rec.fill(md.checksum_type)
-            if unique_md_filenames:
-                rec.rename_file()
-
-            md.generated_repomd_records.append(rec)
+            self.gen_use_original(md)
 
 GENERAL_PLUGIN = GeneralDeltaRepoPlugin
 
@@ -133,15 +123,8 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
 
     NAME = "MainDeltaPlugin"
     VERSION = 1
-    METADATA = ["primary", "filelists", "other", "primary_db", "filelists_db", "other_db"]
-    APPLY_REQUIRED_BUNDLE_KEYS = ["repoid_type_str",
-                                  "removed_obj",
-                                  "unique_md_filenames"]
-    APPLY_BUNDLE_CONTRIBUTION = ["old_repoid", "new_repoid", "no_processed"]
-    GEN_REQUIRED_BUNDLE_KEYS = ["repoid_type_str",
-                                "removed_obj",
-                                "unique_md_filenames"]
-    GEN_BUNDLE_CONTRIBUTION = ["old_repoid", "new_repoid", "no_processed"]
+    METADATA = ["primary", "filelists", "other",
+                "primary_db", "filelists_db", "other_db"]
 
     def _path(self, path, record):
         """Return path to the repodata file."""
@@ -166,20 +149,35 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
                           pkg.location_base or '')
         return idstr
 
-    def apply(self, metadata, bundle):
-
-        # Get info from bundle
-        removed_obj = bundle["removed_obj"]
-        repoid_type_str = bundle["repoid_type_str"]
-        unique_md_filenames = bundle["unique_md_filenames"]
+    def apply(self, metadata):
+
+        gen_db_for = set([])
+        only_copied_metadata = set()
+        removed_packages = {}
+
+        # Make a set of md_types for which databases should be generated
+        for record in self.pluginbundle.get_list("metadata", []):
+            mdtype = record.get("type")
+            if not mdtype:
+                continue
+            if record.get("database", "") == "1":
+                gen_db_for.add(mdtype)
+            if record.get("original", "") == "1":
+                only_copied_metadata.add(mdtype)
+
+        # Make a dict of removed packages key is location_href,
+        # value is location_base
+        for record in self.pluginbundle.get_list("removedpackage", []):
+            location_href = record.get("location_href")
+            if not location_href:
+                continue
+            location_base = record.get("location_base")
+            removed_packages[location_href] = location_base
 
         # Check input arguments
         if "primary" not in metadata:
             raise DeltaRepoPluginError("Primary metadata missing")
 
-        # Names of metadata that was not processed by this plugin
-        no_processed = []
-
         # Metadata that no need to be processed, because they are just copies.
         # This metadata are copied and compressed xml (filelists or other)
         # Their sqlite databases have to be generated if they are required
@@ -189,7 +187,6 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
         fil_md = metadata.get("filelists")
         oth_md = metadata.get("other")
 
-
         # Build and prepare destination paths
         # (And store them in the same Metadata object)
         def prepare_paths_in_metadata(md, xmlclass, dbclass):
@@ -197,12 +194,19 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
                 return
 
             if not md.old_fn:
-                # Old file of this piece of metadata doesn't exist
-                # This file has to be newly added.
-                no_processed.append(md.metadata_type)
+                # The old file is missing; if the corresponding file
+                # in the deltarepo is not a copy (but just a delta),
+                # this metadata cannot be generated! (because
+                # there is no file to which delta could be applied)
+                no_processed_metadata.append(md)
+                md.skip = True
+                return
+
+            if not md.old_fn or md.metadata_type in only_copied_metadata:
                 no_processed_metadata.append(md)
                 md.skip = True
                 return
+
             md.skip = False
 
             suffix = cr.compression_suffix(md.compression_type) or ""
@@ -214,7 +218,7 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
                                 md.compression_type,
                                 md.new_f_stat)
 
-            if removed_obj and removed_obj.get_database(md.metadata_type, False):
+            if md.metadata_type in gen_db_for:
                 md.db_fn = os.path.join(md.out_dir, "{0}.sqlite".format(
                                         md.metadata_type))
                 md.db = dbclass(md.db_fn)
@@ -239,7 +243,6 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
 
         # Apply delta
 
-        removed_packages = set() # set of pkgIds (hashes)
         all_packages = {}        # dict { 'pkgId': pkg }
 
         old_repoid_strings = []
@@ -247,8 +250,8 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
 
         def old_pkgcb(pkg):
             old_repoid_strings.append(self._pkg_id_str(pkg))
-            if pkg.location_href in removed_obj.packages:
-                if removed_obj.packages[pkg.location_href] == pkg.location_base:
+            if pkg.location_href in removed_packages:
+                if removed_packages[pkg.location_href] == pkg.location_base:
                     # This package won't be in new metadata
                     return
             new_repoid_strings.append(self._pkg_id_str(pkg))
@@ -276,13 +279,13 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
         old_repoid = ""
         new_repoid = ""
 
-        h = hashlib.new(repoid_type_str)
+        h = hashlib.new(self.globalbundle.repoid_type_str)
         old_repoid_strings.sort()
         for i in old_repoid_strings:
             h.update(i)
         old_repoid = h.hexdigest()
 
-        h = hashlib.new(repoid_type_str)
+        h = hashlib.new(self.globalbundle.repoid_type_str)
         new_repoid_strings.sort()
         for i in new_repoid_strings:
             h.update(i)
@@ -355,7 +358,7 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
             rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
             rec.load_contentstat(md.new_f_stat)
             rec.fill(md.checksum_type)
-            if unique_md_filenames:
+            if self.globalbundle.unique_md_filenames:
                 rec.rename_file()
 
             md.generated_repomd_records.append(rec)
@@ -374,7 +377,7 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
                                          db_compressed)
                 db_rec.load_contentstat(db_stat)
                 db_rec.fill(md.checksum_type)
-                if unique_md_filenames:
+                if self.globalbundle.unique_md_filenames:
                     db_rec.rename_file()
 
                 md.generated_repomd_records.append(db_rec)
@@ -385,13 +388,27 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
         finish_metadata(fil_md)
         finish_metadata(oth_md)
 
-        # Process XML files that was not processed if sqlite should be generated
+        # Process XML files that were just copied
         def finish_skipped_metadata(md):
             if md is None:
                 return
 
+            if not md.old_fn and md.metadata_type not in  only_copied_metadata:
+                # This metadata file is not a copy
+                # This is a delta
+                # But we don't have an original file on which the delta
+                # could be applied
+                # TODO: Add option to ignore this
+                raise DeltaRepoPluginError("Old file of type {0} "
+                    "is missing. The delta file {1} thus cannot be "
+                    "applied".format(md.metadata_type, md.delta_fn))
+                return
+
+            # Copy the file here
+            self.apply_use_original(md)
+
             # Check if sqlite should be generated
-            if removed_obj and removed_obj.get_database(md.metadata_type, False):
+            if md.metadata_type in gen_db_for:
                 md.db_fn = os.path.join(md.out_dir, "{0}.sqlite".format(
                                         md.metadata_type))
 
@@ -418,7 +435,7 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
                                          db_compressed)
                 db_rec.load_contentstat(db_stat)
                 db_rec.fill(md.checksum_type)
-                if unique_md_filenames:
+                if self.globalbundle.unique_md_filenames:
                     db_rec.rename_file()
 
                 md.generated_repomd_records.append(db_rec)
@@ -426,25 +443,20 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
         for md in no_processed_metadata:
             finish_skipped_metadata(md)
 
-        # Add other bundle stuff
-        bundle["old_repoid"] = old_repoid
-        bundle["new_repoid"] = new_repoid
-        bundle["no_processed"] = no_processed
-
-    def gen(self, metadata, bundle):
-
-        # Get info from bundle
-        removed_obj = bundle["removed_obj"]
-        repoid_type_str = bundle["repoid_type_str"]
-        unique_md_filenames = bundle["unique_md_filenames"]
+        self.globalbundle.calculated_old_repoid = old_repoid
+        self.globalbundle.calculated_new_repoid = new_repoid
 
+    def gen(self, metadata):
         # Check input arguments
         if "primary" not in metadata:
             raise DeltaRepoPluginError("Primary metadata missing")
 
-        # Metadata that was not processed by this plugin
+        # Metadata for which no delta will be generated
         no_processed = []
 
+        # Metadata info that will be persistently stored
+        persistent_metadata_info = {}
+
         pri_md = metadata.get("primary")
         fil_md = metadata.get("filelists")
         oth_md = metadata.get("other")
@@ -455,16 +467,18 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
             if md is None:
                 return
 
-            if removed_obj:
-                # Make a note to the removed obj if the database should be generated
-                db_type = "{0}_db".format(md.metadata_type)
-                available = metadata.get(db_type)
-                removed_obj.set_database(md.metadata_type, available)
+            # Record in the persistent metadata info whether the
+            # sqlite database should be generated for this metadata
+            db_type = "{0}_db".format(md.metadata_type)
+            available = metadata.get(db_type)
+            if available:
+                persistent_metadata_info.setdefault(md.metadata_type, {})["database"] = "1"
+            else:
+                persistent_metadata_info.setdefault(md.metadata_type, {})["database"] = "0"
 
             if not md.old_fn:
                 # Old file of this piece of metadata doesn't exist
                 # This file has to be newly added.
-                no_processed.append(md.metadata_type)
+                no_processed.append(md)
                 md.skip = True
                 return
             md.skip = False
@@ -534,13 +548,13 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
         old_repo_id = ""
         new_repo_id = ""
 
-        h = hashlib.new(repoid_type_str)
+        h = hashlib.new(self.globalbundle.repoid_type_str)
         old_repoid_strings.sort()
         for i in old_repoid_strings:
             h.update(i)
         old_repoid = h.hexdigest()
 
-        h = hashlib.new(repoid_type_str)
+        h = hashlib.new(self.globalbundle.repoid_type_str)
         new_repoid_strings.sort()
         for i in new_repoid_strings:
             h.update(i)
@@ -549,7 +563,10 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
         # Prepare list of removed packages
         removed_pkgs = sorted(old_packages)
         for _, location_href, location_base in removed_pkgs:
-            removed_obj.add_pkg_locations(location_href, location_base)
+            dictionary = {"location_href": location_href}
+            if location_base:
+                dictionary["location_base"] = location_base
+            self.pluginbundle.append("removedpackage", dictionary)
 
         num_of_packages = len(added_packages)
 
@@ -593,7 +610,7 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
             rec = cr.RepomdRecord(md.metadata_type, md.delta_fn)
             rec.load_contentstat(md.delta_f_stat)
             rec.fill(md.checksum_type)
-            if unique_md_filenames:
+            if self.globalbundle.unique_md_filenames:
                 rec.rename_file()
 
             md.generated_repomd_records.append(rec)
@@ -612,20 +629,28 @@ class MainDeltaRepoPlugin(DeltaRepoPlugin):
                                          db_compressed)
                 db_rec.load_contentstat(db_stat)
                 db_rec.fill(md.checksum_type)
-                if unique_md_filenames:
+                if self.globalbundle.unique_md_filenames:
                     db_rec.rename_file()
 
                 md.generated_repomd_records.append(db_rec)
 
-        # Add records to the bundle
-
+        # Add records to metadata objects
         finish_metadata(pri_md)
         finish_metadata(fil_md)
         finish_metadata(oth_md)
 
-        bundle["old_repoid"] = old_repoid
-        bundle["new_repoid"] = new_repoid
-        bundle["no_processed"] = no_processed
+        # Process the unprocessed items
+        # These files are just copied and compressed, no real delta
+        for md in no_processed:
+            self.gen_use_original(md)
+            persistent_metadata_info.setdefault(md.metadata_type, {})["original"] = "1"
+
+        # Store data persistently
+        for key, val in persistent_metadata_info.items():
+            val["type"] = key
+            self.pluginbundle.append("metadata", val)
 
+        self.globalbundle.calculated_old_repoid = old_repoid
+        self.globalbundle.calculated_new_repoid = new_repoid
 
 PLUGINS.append(MainDeltaRepoPlugin)
index 5cd1fc0..7f4a129 100644 (file)
@@ -11,8 +11,8 @@ Copyright (C) 2013   Tomas Mlcoch
 import os
 import shutil
 import createrepo_c as cr
-from deltarepo.common import LoggingInterface, Metadata, RemovedXml
-from deltarepo.delta_plugins import PLUGINS, GENERAL_PLUGIN
+from deltarepo.common import LoggingInterface, Metadata, DeltaMetadata, PluginBundle
+from deltarepo.delta_plugins import GlobalBundle, PLUGINS, GENERAL_PLUGIN
 from deltarepo.errors import DeltaRepoError, DeltaRepoPluginError
 
 __all__ = ['DeltaRepoGenerator']
@@ -103,14 +103,12 @@ class DeltaRepoGenerator(LoggingInterface):
         self.old_id = self.old_repomd.repoid
         self.new_id = self.new_repomd.repoid
 
-        # Prepare removed xml object
-        self.removedxmlobj = RemovedXml()
+        self.deltametadata = DeltaMetadata()
 
-        # Prepare bundle
-        self.bundle = {}
-        self.bundle["repoid_type_str"] = self.repoid_type_str
-        self.bundle["removed_obj"] = self.removedxmlobj
-        self.bundle["unique_md_filenames"] = self.unique_md_filenames
+        # Prepare global bundle
+        self.globalbundle = GlobalBundle()
+        self.globalbundle.repoid_type_str = self.repoid_type_str
+        self.globalbundle.unique_md_filenames = self.unique_md_filenames
 
     def _new_metadata(self, metadata_type):
         """Return Metadata Object for the metadata_type or None"""
@@ -152,97 +150,51 @@ class DeltaRepoGenerator(LoggingInterface):
 
         return metadata
 
-    def _run_plugin(plugin, metadata_objects):
-        # TODO XXX
-        pass
-
     def gen(self):
 
         # Prepare output path
         os.mkdir(self.delta_repodata_path)
 
+        # Set of types of processed metadata records ("primary", "primary_db"...)
         processed_metadata = set()
-        used_plugins = set()
-        plugin_used = True
-
-        while plugin_used:
-            # Iterate on plugins until any of them was used
-            plugin_used = False
-
-            for plugin in PLUGINS:
-
-                # Use only plugins that haven't been already used
-                if plugin in used_plugins:
-                    continue
-
-                # Check which metadata this plugin want to process
-                conflicting_metadata = set(plugin.METADATA) & processed_metadata
-                if conflicting_metadata:
-                    message = "Plugin {0}: Error - Plugin want to process " \
-                              "already processed metadata {1}".format(
-                               plugin.NAME, conflicting_metadata)
-                    self._error(message)
-                    raise DeltaRepoError(message)
-
-                # Prepare metadata for the plugin
-                metadata_objects = {}
-                for metadata_name in plugin.METADATA:
-                    metadata_object = self._new_metadata(metadata_name)
-                    if metadata_object is not None:
-                        metadata_objects[metadata_name] = metadata_object
-
-                # Skip plugin if no supported metadata available
-                if not metadata_objects:
-                    self._debug("Plugin {0}: Skipped - None of supported " \
-                                "metadata {1} available".format(
-                                plugin.NAME, plugin.METADATA))
-                    used_plugins.add(plugin)
-                    continue
-
-                # Check if bundle contains all what plugin need
-                required_bundle_keys = set(plugin.GEN_REQUIRED_BUNDLE_KEYS)
-                bundle_keys = set(self.bundle.keys())
-                if not required_bundle_keys.issubset(bundle_keys):
-                    self._debug("Plugin {0}: Skipped - Bundle keys {1} "\
-                                "are not available".format(plugin.NAME,
-                                (required_bundle_keys - bundle_keys)))
-                    continue
-
-                # Use the plugin
-                self._debug("Plugin {0}: Active".format(plugin.NAME))
-                plugin_instance = plugin()
-                plugin_instance.gen(metadata_objects, self.bundle)
-
-                # Check what bundle keys was added by the plugin
-                new_bundle_keys = set(self.bundle.keys())
-                diff = new_bundle_keys - bundle_keys
-                if diff != set(plugin.GEN_BUNDLE_CONTRIBUTION):
-                    message = "Plugin {0}: Error - Plugin should add: {1} " \
-                               "bundle items but add: {2}".format(
-                               plugin.NAME, plugin.GEN_BUNDLE_CONTRIBUTION,
-                               list(diff))
-                    self._error(message)
-                    raise DeltaRepoError(message)
-
-                # Put repomd records from processed metadatas to repomd
-                for md in metadata_objects.values():
-                    self._debug("Plugin {0}: Processed \"{1}\" delta record "\
-                                "which produced:".format(
-                                plugin.NAME, md.metadata_type))
-                    for repomd_record in md.generated_repomd_records:
-                        self._debug(" - {0}".format(repomd_record.type))
-                        self.delta_repomd.set_record(repomd_record)
-
-                # Organization stuff
-                for md in metadata_objects.keys():
-                    if md in self.bundle["no_processed"]:
-                        self._debug("Plugin {0}: Skip processing of \"{1}\" delta record".format(
-                                    plugin.NAME, md))
-                        continue
-                    processed_metadata.add(md)
-
-                used_plugins.add(plugin)
-                plugin_used = True
+
+        for plugin in PLUGINS:
+
+            # Prepare metadata for the plugin
+            metadata_objects = {}
+            for metadata_name in plugin.METADATA:
+                metadata_object = self._new_metadata(metadata_name)
+                if metadata_object is not None:
+                    metadata_objects[metadata_name] = metadata_object
+
+            # Skip plugin if no supported metadata available
+            if not metadata_objects:
+                self._debug("Plugin {0}: Skipped - None of supported " \
+                            "metadata {1} available".format(
+                            plugin.NAME, plugin.METADATA))
+                continue
+
+            # Prepare plugin bundle
+            pluginbundle = PluginBundle(plugin.NAME, plugin.VERSION)
+            self.deltametadata.add_pluginbundle(pluginbundle)
+
+            # Use the plugin
+            self._debug("Plugin {0}: Active".format(plugin.NAME))
+            plugin_instance = plugin(pluginbundle, self.globalbundle)
+            plugin_instance.gen(metadata_objects)
+
+            # Put repomd records from processed metadatas to repomd
+            for md in metadata_objects.values():
+                self._debug("Plugin {0}: Processed \"{1}\" delta record "\
+                            "which produced:".format(
+                            plugin.NAME, md.metadata_type))
+                for repomd_record in md.generated_repomd_records:
+                    self._debug(" - {0}".format(repomd_record.type))
+                    self.delta_repomd.set_record(repomd_record)
+
+            # Organization stuff
+            for md in metadata_objects.keys():
+                processed_metadata.add(md)
 
         # Process rest of the metadata files
         metadata_objects = {}
@@ -260,8 +212,8 @@ class DeltaRepoGenerator(LoggingInterface):
         if metadata_objects:
             # Use the plugin
             self._debug("Plugin {0}: Active".format(GENERAL_PLUGIN.NAME))
-            plugin_instance = GENERAL_PLUGIN()
-            plugin_instance.gen(metadata_objects, self.bundle)
+            plugin_instance = GENERAL_PLUGIN(None, self.globalbundle)
+            plugin_instance.gen(metadata_objects)
 
             for md in metadata_objects.values():
                 self._debug("Plugin {0}: Processed \"{1}\" delta record "\
@@ -271,41 +223,42 @@ class DeltaRepoGenerator(LoggingInterface):
                     self._debug(" - {0}".format(repomd_record.type))
                     self.delta_repomd.set_record(repomd_record)
 
-        # Write out removed.xml
-        self._debug("Writing removed.xml ...")
-        removedxml_xml = self.removedxmlobj.xml_dump()
-        removedxml_path = os.path.join(self.delta_repodata_path, "removed.xml")
+        # Write out deltametadata.xml
+        deltametadata_xml = self.deltametadata.xmldump()
+        deltametadata_path = os.path.join(self.delta_repodata_path, "deltametadata.xml")
 
         if (self.compression_type != cr.UNKNOWN_COMPRESSION):
-            removedxml_path += cr.compression_suffix(self.compression_type)
+            deltametadata_path += cr.compression_suffix(self.compression_type)
             stat = cr.ContentStat(self.checksum_type)
-            f = cr.CrFile(removedxml_path, cr.MODE_WRITE, cr.XZ, stat)
-            f.write(removedxml_xml)
+            f = cr.CrFile(deltametadata_path, cr.MODE_WRITE,
+                          self.compression_type, stat)
+            f.write(deltametadata_xml)
             f.close()
         else:
-            open(removedxml_path, "w").write(removedxml_xml)
+            open(deltametadata_path, "w").write(deltametadata_xml)
 
-        removedxml_rec = cr.RepomdRecord("removed", removedxml_path)
-        removedxml_rec.load_contentstat(stat)
-        removedxml_rec.fill(self.checksum_type)
+        deltametadata_rec = cr.RepomdRecord("deltametadata", deltametadata_path)
+        deltametadata_rec.load_contentstat(stat)
+        deltametadata_rec.fill(self.checksum_type)
         if self.unique_md_filenames:
-            removedxml_rec.rename_file()
-        self.delta_repomd.set_record(removedxml_rec)
+            deltametadata_rec.rename_file()
+        self.delta_repomd.set_record(deltametadata_rec)
 
         # Check if calculated repoids match
         self._debug("Checking expected repoids")
 
-        if not "new_repoid" in self.bundle or not "old_repoid" in self.bundle:
-            message = "\"new_repoid\" or \"old_repoid\" is missing in bundle"
+        if self.globalbundle.calculated_new_repoid is None \
+                or self.globalbundle.calculated_old_repoid is None:
+            message = "\"new_repoid\" or \"old_repoid\" wasn't calculated"
             self._error(message)
             raise DeltaRepoError(message)
 
         if self.old_id:
-            if self.old_id != self.bundle["old_repoid"]:
+            if self.old_id != self.globalbundle.calculated_old_repoid:
                 message = "Repoid of the \"{0}\" repository doesn't match "\
                           "the real repoid ({1} != {2}).".format(
                            self.old_repo_path, self.old_id,
-                           self.bundle["old_repoid"])
+                           self.globalbundle.calculated_old_repoid)
                 self._error(message)
                 raise DeltaRepoError(message)
             else:
@@ -316,11 +269,11 @@ class DeltaRepoGenerator(LoggingInterface):
                         "repomd".format(self.old_repo_path))
 
         if self.new_id:
-            if self.new_id and self.new_id != self.bundle["new_repoid"]:
+            if self.new_id != self.globalbundle.calculated_new_repoid:
                 message = "Repoid of the \"{0}\" repository doesn't match "\
                           "the real repoid ({1} != {2}).".format(
                            self.new_repo_path, self.new_id,
-                           self.bundle["new_repoid"])
+                           self.globalbundle.calculated_new_repoid)
                 self._error(message)
                 raise DeltaRepoError(message)
             else:
@@ -332,8 +285,8 @@ class DeltaRepoGenerator(LoggingInterface):
 
         # Prepare and write out the new repomd.xml
         self._debug("Preparing repomd.xml ...")
-        deltarepoid = "{0}-{1}".format(self.bundle["old_repoid"],
-                                       self.bundle["new_repoid"])
+        deltarepoid = "{0}-{1}".format(self.globalbundle.calculated_old_repoid,
+                                       self.globalbundle.calculated_new_repoid)
         self.delta_repomd.set_repoid(deltarepoid, self.repoid_type_str)
         self.delta_repomd.sort_records()
         delta_repomd_xml = self.delta_repomd.xml_dump()