header: Update to Vulkan version 1.0.37
author    Mark Lobodzinski <mark@lunarg.com>
Mon, 12 Dec 2016 16:44:34 +0000 (09:44 -0700)
committer Mark Lobodzinski <mark@lunarg.com>
Mon, 12 Dec 2016 20:45:53 +0000 (13:45 -0700)
- header:  Updated vulkan.h
- scripts: Updated generator.py
- scripts: Updated vk.xml
- scripts: Merged recent genvk.py changes into lvl_genvk.py
- layers:  Updated json files
- loader:  Updated SOVERSION to 37
- header:  Rebuilt and updated vulkan.hpp
- layers:  Updated thread_checker for new functions
- scripts: Fixed vk_helper.py's overzealous count detection

Change-Id: Ia10f5fc759166a09999f01e68bbcadb30ddf67f7

23 files changed:
include/vulkan/vk_layer.h
include/vulkan/vulkan.h
include/vulkan/vulkan.hpp
layers/linux/VkLayer_core_validation.json
layers/linux/VkLayer_image.json
layers/linux/VkLayer_object_tracker.json
layers/linux/VkLayer_parameter_validation.json
layers/linux/VkLayer_swapchain.json
layers/linux/VkLayer_threading.json
layers/linux/VkLayer_unique_objects.json
layers/threading.h
layers/windows/VkLayer_core_validation.json
layers/windows/VkLayer_image.json
layers/windows/VkLayer_object_tracker.json
layers/windows/VkLayer_parameter_validation.json
layers/windows/VkLayer_swapchain.json
layers/windows/VkLayer_threading.json
layers/windows/VkLayer_unique_objects.json
loader/CMakeLists.txt
scripts/generator.py
scripts/lvl_genvk.py
scripts/vk.xml
scripts/vulkan.py

diff --git a/include/vulkan/vk_layer.h b/include/vulkan/vk_layer.h
index 78f6372..4d0da1a 100644
@@ -173,6 +173,14 @@ typedef struct VkLayerDispatchTable_ {
     PFN_vkCmdDebugMarkerBeginEXT CmdDebugMarkerBeginEXT;
     PFN_vkCmdDebugMarkerEndEXT CmdDebugMarkerEndEXT;
     PFN_vkCmdDebugMarkerInsertEXT CmdDebugMarkerInsertEXT;
+    PFN_vkCmdProcessCommandsNVX CmdProcessCommandsNVX;
+    PFN_vkCmdReserveSpaceForCommandsNVX CmdReserveSpaceForCommandsNVX;
+    PFN_vkCreateIndirectCommandsLayoutNVX CreateIndirectCommandsLayoutNVX;
+    PFN_vkDestroyIndirectCommandsLayoutNVX DestroyIndirectCommandsLayoutNVX;
+    PFN_vkCreateObjectTableNVX CreateObjectTableNVX;
+    PFN_vkDestroyObjectTableNVX DestroyObjectTableNVX;
+    PFN_vkRegisterObjectsNVX RegisterObjectsNVX;
+    PFN_vkUnregisterObjectsNVX UnregisterObjectsNVX;
 } VkLayerDispatchTable;
 
 typedef struct VkLayerInstanceDispatchTable_ {
@@ -245,6 +253,8 @@ typedef struct VkLayerInstanceDispatchTable_ {
         CreateDisplayPlaneSurfaceKHR;
     PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV
         GetPhysicalDeviceExternalImageFormatPropertiesNV;
+    PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX
+        GetPhysicalDeviceGeneratedCommandsPropertiesNVX;
 } VkLayerInstanceDispatchTable;
 
 // ------------------------------------------------------------------------------------------------
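
Note: the eight NVX entries added to VkLayerDispatchTable above are resolved the same way as the existing ones. A minimal sketch, assuming the usual layer pattern; the helper name and the gpa parameter are illustrative and not part of this commit. The new instance-table entry, GetPhysicalDeviceGeneratedCommandsPropertiesNVX, would be resolved analogously through the next layer's vkGetInstanceProcAddr.

    // Sketch only: resolve the new NVX device-level entry points through the
    // next layer's vkGetDeviceProcAddr and store them in the dispatch table.
    static void fill_nvx_device_entries(VkLayerDispatchTable* table, VkDevice device,
                                        PFN_vkGetDeviceProcAddr gpa)
    {
        table->CmdProcessCommandsNVX =
            (PFN_vkCmdProcessCommandsNVX)gpa(device, "vkCmdProcessCommandsNVX");
        table->CmdReserveSpaceForCommandsNVX =
            (PFN_vkCmdReserveSpaceForCommandsNVX)gpa(device, "vkCmdReserveSpaceForCommandsNVX");
        table->CreateIndirectCommandsLayoutNVX =
            (PFN_vkCreateIndirectCommandsLayoutNVX)gpa(device, "vkCreateIndirectCommandsLayoutNVX");
        table->DestroyIndirectCommandsLayoutNVX =
            (PFN_vkDestroyIndirectCommandsLayoutNVX)gpa(device, "vkDestroyIndirectCommandsLayoutNVX");
        table->CreateObjectTableNVX =
            (PFN_vkCreateObjectTableNVX)gpa(device, "vkCreateObjectTableNVX");
        table->DestroyObjectTableNVX =
            (PFN_vkDestroyObjectTableNVX)gpa(device, "vkDestroyObjectTableNVX");
        table->RegisterObjectsNVX =
            (PFN_vkRegisterObjectsNVX)gpa(device, "vkRegisterObjectsNVX");
        table->UnregisterObjectsNVX =
            (PFN_vkUnregisterObjectsNVX)gpa(device, "vkUnregisterObjectsNVX");
    }
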
diff --git a/include/vulkan/vulkan.h b/include/vulkan/vulkan.h
index b33871e..b42cca5 100644
@@ -43,7 +43,7 @@ extern "C" {
 #define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff)
 #define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff)
 // Version of this file
-#define VK_HEADER_VERSION 34
+#define VK_HEADER_VERSION 37
 
 
 #define VK_NULL_HANDLE 0
@@ -226,6 +226,12 @@ typedef enum VkStructureType {
     VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001,
     VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000,
     VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000,
+    VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000,
+    VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001,
+    VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002,
+    VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003,
+    VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004,
+    VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005,
     VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO,
     VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
     VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1),
@@ -918,6 +924,7 @@ typedef enum VkPipelineStageFlagBits {
     VK_PIPELINE_STAGE_HOST_BIT = 0x00004000,
     VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000,
     VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000,
+    VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000,
     VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
 } VkPipelineStageFlagBits;
 typedef VkFlags VkPipelineStageFlags;
@@ -1092,6 +1099,8 @@ typedef enum VkAccessFlagBits {
     VK_ACCESS_HOST_WRITE_BIT = 0x00004000,
     VK_ACCESS_MEMORY_READ_BIT = 0x00008000,
     VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000,
+    VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000,
+    VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000,
     VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
 } VkAccessFlagBits;
 typedef VkFlags VkAccessFlags;
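
Note: the new stage and access bits above are intended to be paired in synchronization commands. A hedged sketch; the source stage/access pairing is an assumed example, not something mandated by this change.

    // Sketch only: make prior shader writes visible to generated-command
    // processing using the new NVX stage and access flags.
    VkMemoryBarrier barrier = {};
    barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
    barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
    barrier.dstAccessMask = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX;
    vkCmdPipelineBarrier(commandBuffer,
                         VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
                         VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
                         0,                 // dependencyFlags
                         1, &barrier,       // memory barriers
                         0, nullptr,        // buffer memory barriers
                         0, nullptr);       // image memory barriers
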
@@ -3735,7 +3744,7 @@ VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(
 #define VK_EXT_debug_report 1
 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT)
 
-#define VK_EXT_DEBUG_REPORT_SPEC_VERSION  3
+#define VK_EXT_DEBUG_REPORT_SPEC_VERSION  4
 #define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report"
 #define VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT
 
@@ -3770,9 +3779,13 @@ typedef enum VkDebugReportObjectTypeEXT {
     VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26,
     VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27,
     VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = 28,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30,
+    VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31,
+    VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32,
     VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
-    VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
-    VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1),
+    VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1),
     VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
 } VkDebugReportObjectTypeEXT;
 
@@ -4154,6 +4167,232 @@ typedef struct VkValidationFlagsEXT {
 
 
 
+#define VK_NVX_device_generated_commands 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX)
+
+#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 1
+#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands"
+
+
+typedef enum VkIndirectCommandsTokenTypeNVX {
+    VK_INDIRECT_COMMANDS_TOKEN_PIPELINE_NVX = 0,
+    VK_INDIRECT_COMMANDS_TOKEN_DESCRIPTOR_SET_NVX = 1,
+    VK_INDIRECT_COMMANDS_TOKEN_INDEX_BUFFER_NVX = 2,
+    VK_INDIRECT_COMMANDS_TOKEN_VERTEX_BUFFER_NVX = 3,
+    VK_INDIRECT_COMMANDS_TOKEN_PUSH_CONSTANT_NVX = 4,
+    VK_INDIRECT_COMMANDS_TOKEN_DRAW_INDEXED_NVX = 5,
+    VK_INDIRECT_COMMANDS_TOKEN_DRAW_NVX = 6,
+    VK_INDIRECT_COMMANDS_TOKEN_DISPATCH_NVX = 7,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_PIPELINE_NVX,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_DISPATCH_NVX,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_PIPELINE_NVX + 1),
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkIndirectCommandsTokenTypeNVX;
+
+typedef enum VkObjectEntryTypeNVX {
+    VK_OBJECT_ENTRY_DESCRIPTOR_SET_NVX = 0,
+    VK_OBJECT_ENTRY_PIPELINE_NVX = 1,
+    VK_OBJECT_ENTRY_INDEX_BUFFER_NVX = 2,
+    VK_OBJECT_ENTRY_VERTEX_BUFFER_NVX = 3,
+    VK_OBJECT_ENTRY_PUSH_CONSTANT_NVX = 4,
+    VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_DESCRIPTOR_SET_NVX,
+    VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_PUSH_CONSTANT_NVX,
+    VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_DESCRIPTOR_SET_NVX + 1),
+    VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkObjectEntryTypeNVX;
+
+
+typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX {
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkIndirectCommandsLayoutUsageFlagBitsNVX;
+typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX;
+
+typedef enum VkObjectEntryUsageFlagBitsNVX {
+    VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001,
+    VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002,
+    VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkObjectEntryUsageFlagBitsNVX;
+typedef VkFlags VkObjectEntryUsageFlagsNVX;
+
+typedef struct VkDeviceGeneratedCommandsFeaturesNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           computeBindingPointSupport;
+} VkDeviceGeneratedCommandsFeaturesNVX;
+
+typedef struct VkDeviceGeneratedCommandsLimitsNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           maxIndirectCommandsLayoutTokenCount;
+    uint32_t           maxObjectEntryCounts;
+    uint32_t           minSequenceCountBufferOffsetAlignment;
+    uint32_t           minSequenceIndexBufferOffsetAlignment;
+    uint32_t           minCommandsTokenBufferOffsetAlignment;
+} VkDeviceGeneratedCommandsLimitsNVX;
+
+typedef struct VkIndirectCommandsTokenNVX {
+    VkIndirectCommandsTokenTypeNVX    tokenType;
+    VkBuffer                          buffer;
+    VkDeviceSize                      offset;
+} VkIndirectCommandsTokenNVX;
+
+typedef struct VkIndirectCommandsLayoutTokenNVX {
+    VkIndirectCommandsTokenTypeNVX    tokenType;
+    uint32_t                          bindingUnit;
+    uint32_t                          dynamicCount;
+    uint32_t                          divisor;
+} VkIndirectCommandsLayoutTokenNVX;
+
+typedef struct VkIndirectCommandsLayoutCreateInfoNVX {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineBindPoint                        pipelineBindPoint;
+    VkIndirectCommandsLayoutUsageFlagsNVX      flags;
+    uint32_t                                   tokenCount;
+    const VkIndirectCommandsLayoutTokenNVX*    pTokens;
+} VkIndirectCommandsLayoutCreateInfoNVX;
+
+typedef struct VkCmdProcessCommandsInfoNVX {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkObjectTableNVX                     objectTable;
+    VkIndirectCommandsLayoutNVX          indirectCommandsLayout;
+    uint32_t                             indirectCommandsTokenCount;
+    const VkIndirectCommandsTokenNVX*    pIndirectCommandsTokens;
+    uint32_t                             maxSequencesCount;
+    VkCommandBuffer                      targetCommandBuffer;
+    VkBuffer                             sequencesCountBuffer;
+    VkDeviceSize                         sequencesCountOffset;
+    VkBuffer                             sequencesIndexBuffer;
+    VkDeviceSize                         sequencesIndexOffset;
+} VkCmdProcessCommandsInfoNVX;
+
+typedef struct VkCmdReserveSpaceForCommandsInfoNVX {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkObjectTableNVX               objectTable;
+    VkIndirectCommandsLayoutNVX    indirectCommandsLayout;
+    uint32_t                       maxSequencesCount;
+} VkCmdReserveSpaceForCommandsInfoNVX;
+
+typedef struct VkObjectTableCreateInfoNVX {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    uint32_t                             objectCount;
+    const VkObjectEntryTypeNVX*          pObjectEntryTypes;
+    const uint32_t*                      pObjectEntryCounts;
+    const VkObjectEntryUsageFlagsNVX*    pObjectEntryUsageFlags;
+    uint32_t                             maxUniformBuffersPerDescriptor;
+    uint32_t                             maxStorageBuffersPerDescriptor;
+    uint32_t                             maxStorageImagesPerDescriptor;
+    uint32_t                             maxSampledImagesPerDescriptor;
+    uint32_t                             maxPipelineLayouts;
+} VkObjectTableCreateInfoNVX;
+
+typedef struct VkObjectTableEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+} VkObjectTableEntryNVX;
+
+typedef struct VkObjectTablePipelineEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkPipeline                    pipeline;
+} VkObjectTablePipelineEntryNVX;
+
+typedef struct VkObjectTableDescriptorSetEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkPipelineLayout              pipelineLayout;
+    VkDescriptorSet               descriptorSet;
+} VkObjectTableDescriptorSetEntryNVX;
+
+typedef struct VkObjectTableVertexBufferEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkBuffer                      buffer;
+} VkObjectTableVertexBufferEntryNVX;
+
+typedef struct VkObjectTableIndexBufferEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkBuffer                      buffer;
+} VkObjectTableIndexBufferEntryNVX;
+
+typedef struct VkObjectTablePushConstantEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkPipelineLayout              pipelineLayout;
+    VkShaderStageFlags            stageFlags;
+} VkObjectTablePushConstantEntryNVX;
+
+
+typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable);
+typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const*    ppObjectTableEntries, const uint32_t* pObjectIndices);
+typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits);
+#endif
+
 #ifdef __cplusplus
 }
 #endif
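
Note: a minimal application-side sketch for the VK_NVX_device_generated_commands entry points declared above. It assumes the extension was enabled at device creation and that instance, physicalDevice, and device already exist; none of that setup appears in this diff.

    // Sketch only: resolve the new entry points and query the extension's
    // features and limits.
    PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX pfnGetGenCmdProps =
        (PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)vkGetInstanceProcAddr(
            instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX");
    PFN_vkCreateObjectTableNVX pfnCreateObjectTableNVX =
        (PFN_vkCreateObjectTableNVX)vkGetDeviceProcAddr(device, "vkCreateObjectTableNVX");

    VkDeviceGeneratedCommandsFeaturesNVX features = {};
    features.sType = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX;
    VkDeviceGeneratedCommandsLimitsNVX limits = {};
    limits.sType = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX;
    if (pfnGetGenCmdProps != nullptr)
        pfnGetGenCmdProps(physicalDevice, &features, &limits);
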
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
index d212d33..9cd6d8a 100644
@@ -33,6 +33,7 @@
 #include <initializer_list>
 #include <string>
 #include <system_error>
+#include <tuple>
 #include <type_traits>
 #include <vulkan/vulkan.h>
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -40,7 +41,7 @@
 # include <vector>
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-static_assert( VK_HEADER_VERSION ==  34 , "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION ==  37 , "Wrong VK_HEADER_VERSION!" );
 
 // 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
 // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -65,8 +66,30 @@ static_assert( VK_HEADER_VERSION ==  34 , "Wrong VK_HEADER_VERSION!" );
 # endif
 #endif
 
+
+#if !defined(VULKAN_HPP_INLINE)
+# if defined(__clang__)
+#  if __has_attribute(always_inline)
+#   define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
+#  else
+#    define VULKAN_HPP_INLINE inline
+#  endif
+# elif defined(__GNUC__)
+#  define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
+# elif defined(_MSC_VER)
+#  define VULKAN_HPP_INLINE __forceinline
+# else
+#  define VULKAN_HPP_INLINE inline
+# endif
+#endif
+
 namespace vk
 {
+  template <typename FlagBitsType> struct FlagTraits
+  {
+    enum { allFlags = 0 };
+  };
+
   template <typename BitType, typename MaskType = VkFlags>
   class Flags
   {
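
Note: because the block above is guarded by #if !defined(VULKAN_HPP_INLINE), a project can override the forced-inlining behaviour. A hedged sketch:

    // Sketch only: opt out of always_inline/__forceinline by pre-defining the
    // macro before including the header.
    #define VULKAN_HPP_INLINE inline
    #include <vulkan/vulkan.hpp>
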
@@ -136,6 +159,13 @@ namespace vk
       return !m_mask;
     }
 
+    Flags<BitType> operator~() const
+    {
+      Flags<BitType> result(*this);
+      result.m_mask ^= FlagTraits<BitType>::allFlags;
+      return result;
+    }
+
     bool operator==(Flags<BitType> const& rhs) const
     {
       return m_mask == rhs.m_mask;
@@ -178,11 +208,13 @@ namespace vk
     return flags ^ bit;
   }
 
+
   template <typename RefType>
   class Optional
   {
   public:
     Optional(RefType & reference) { m_ptr = &reference; }
+    Optional(RefType * ptr) { m_ptr = ptr; }
     Optional(std::nullptr_t) { m_ptr = nullptr; }
 
     operator RefType*() const { return m_ptr; }
@@ -314,7 +346,7 @@ namespace vk
     eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV
   };
 
-  inline std::string to_string(Result value)
+  VULKAN_HPP_INLINE std::string to_string(Result value)
   {
     switch (value)
     {
@@ -362,18 +394,18 @@ namespace vk
 # undef noexcept
 #endif
 
-  inline const std::error_category& errorCategory()
+  VULKAN_HPP_INLINE const std::error_category& errorCategory()
   {
     static ErrorCategoryImpl instance;
     return instance;
   }
 
-  inline std::error_code make_error_code(Result e)
+  VULKAN_HPP_INLINE std::error_code make_error_code(Result e)
   {
     return std::error_code(static_cast<int>(e), errorCategory());
   }
 
-  inline std::error_condition make_error_condition(Result e)
+  VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e)
   {
     return std::error_condition(static_cast<int>(e), errorCategory());
   }
@@ -399,6 +431,8 @@ namespace vk
 
     Result  result;
     T       value;
+
+    operator std::tuple<Result&, T&>() { return std::tuple<Result&, T&>(result, value); }
   };
 
   template <typename T>
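
Note: the new tuple conversion on ResultValue<T> is what lets callers unpack a result with std::tie. A minimal sketch, assuming VULKAN_HPP_NO_EXCEPTIONS is defined and that device and createInfo already exist (neither is part of this diff):

    // Sketch only: unpack a ResultValue<T> into separate variables.
    vk::Result result;
    vk::SwapchainKHR swapchain;
    std::tie(result, swapchain) = device.createSwapchainKHR(createInfo);
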
@@ -420,7 +454,7 @@ namespace vk
 #endif
   };
 
-  inline ResultValueType<void>::type createResultValue( Result result, char const * message )
+  VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
   {
 #ifdef VULKAN_HPP_NO_EXCEPTIONS
     assert( result == Result::eSuccess );
@@ -434,7 +468,7 @@ namespace vk
   }
 
   template <typename T>
-  inline typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
+  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
   {
 #ifdef VULKAN_HPP_NO_EXCEPTIONS
     assert( result == Result::eSuccess );
@@ -448,7 +482,7 @@ namespace vk
 #endif
   }
 
-  inline Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
+  VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
   {
 #ifdef VULKAN_HPP_NO_EXCEPTIONS
     assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
@@ -462,7 +496,7 @@ namespace vk
   }
 
   template <typename T>
-  inline ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
+  VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
   {
 #ifdef VULKAN_HPP_NO_EXCEPTIONS
     assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
@@ -487,7 +521,7 @@ namespace vk
 
   using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
 
-  inline FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 )
   {
     return FramebufferCreateFlags( bit0 ) | bit1;
   }
@@ -498,7 +532,7 @@ namespace vk
 
   using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
 
-  inline QueryPoolCreateFlags operator|( QueryPoolCreateFlagBits bit0, QueryPoolCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE QueryPoolCreateFlags operator|( QueryPoolCreateFlagBits bit0, QueryPoolCreateFlagBits bit1 )
   {
     return QueryPoolCreateFlags( bit0 ) | bit1;
   }
@@ -509,7 +543,7 @@ namespace vk
 
   using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
 
-  inline RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 )
   {
     return RenderPassCreateFlags( bit0 ) | bit1;
   }
@@ -520,7 +554,7 @@ namespace vk
 
   using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
 
-  inline SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 )
   {
     return SamplerCreateFlags( bit0 ) | bit1;
   }
@@ -531,7 +565,7 @@ namespace vk
 
   using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
 
-  inline PipelineLayoutCreateFlags operator|( PipelineLayoutCreateFlagBits bit0, PipelineLayoutCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineLayoutCreateFlags operator|( PipelineLayoutCreateFlagBits bit0, PipelineLayoutCreateFlagBits bit1 )
   {
     return PipelineLayoutCreateFlags( bit0 ) | bit1;
   }
@@ -542,7 +576,7 @@ namespace vk
 
   using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
 
-  inline PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 )
   {
     return PipelineCacheCreateFlags( bit0 ) | bit1;
   }
@@ -553,7 +587,7 @@ namespace vk
 
   using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
 
-  inline PipelineDepthStencilStateCreateFlags operator|( PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineDepthStencilStateCreateFlags operator|( PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 )
   {
     return PipelineDepthStencilStateCreateFlags( bit0 ) | bit1;
   }
@@ -564,7 +598,7 @@ namespace vk
 
   using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
 
-  inline PipelineDynamicStateCreateFlags operator|( PipelineDynamicStateCreateFlagBits bit0, PipelineDynamicStateCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineDynamicStateCreateFlags operator|( PipelineDynamicStateCreateFlagBits bit0, PipelineDynamicStateCreateFlagBits bit1 )
   {
     return PipelineDynamicStateCreateFlags( bit0 ) | bit1;
   }
@@ -575,7 +609,7 @@ namespace vk
 
   using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
 
-  inline PipelineColorBlendStateCreateFlags operator|( PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineColorBlendStateCreateFlags operator|( PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 )
   {
     return PipelineColorBlendStateCreateFlags( bit0 ) | bit1;
   }
@@ -586,7 +620,7 @@ namespace vk
 
   using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
 
-  inline PipelineMultisampleStateCreateFlags operator|( PipelineMultisampleStateCreateFlagBits bit0, PipelineMultisampleStateCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineMultisampleStateCreateFlags operator|( PipelineMultisampleStateCreateFlagBits bit0, PipelineMultisampleStateCreateFlagBits bit1 )
   {
     return PipelineMultisampleStateCreateFlags( bit0 ) | bit1;
   }
@@ -597,7 +631,7 @@ namespace vk
 
   using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
 
-  inline PipelineRasterizationStateCreateFlags operator|( PipelineRasterizationStateCreateFlagBits bit0, PipelineRasterizationStateCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineRasterizationStateCreateFlags operator|( PipelineRasterizationStateCreateFlagBits bit0, PipelineRasterizationStateCreateFlagBits bit1 )
   {
     return PipelineRasterizationStateCreateFlags( bit0 ) | bit1;
   }
@@ -608,7 +642,7 @@ namespace vk
 
   using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
 
-  inline PipelineViewportStateCreateFlags operator|( PipelineViewportStateCreateFlagBits bit0, PipelineViewportStateCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineViewportStateCreateFlags operator|( PipelineViewportStateCreateFlagBits bit0, PipelineViewportStateCreateFlagBits bit1 )
   {
     return PipelineViewportStateCreateFlags( bit0 ) | bit1;
   }
@@ -619,7 +653,7 @@ namespace vk
 
   using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
 
-  inline PipelineTessellationStateCreateFlags operator|( PipelineTessellationStateCreateFlagBits bit0, PipelineTessellationStateCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineTessellationStateCreateFlags operator|( PipelineTessellationStateCreateFlagBits bit0, PipelineTessellationStateCreateFlagBits bit1 )
   {
     return PipelineTessellationStateCreateFlags( bit0 ) | bit1;
   }
@@ -630,7 +664,7 @@ namespace vk
 
   using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
 
-  inline PipelineInputAssemblyStateCreateFlags operator|( PipelineInputAssemblyStateCreateFlagBits bit0, PipelineInputAssemblyStateCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineInputAssemblyStateCreateFlags operator|( PipelineInputAssemblyStateCreateFlagBits bit0, PipelineInputAssemblyStateCreateFlagBits bit1 )
   {
     return PipelineInputAssemblyStateCreateFlags( bit0 ) | bit1;
   }
@@ -641,7 +675,7 @@ namespace vk
 
   using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
 
-  inline PipelineVertexInputStateCreateFlags operator|( PipelineVertexInputStateCreateFlagBits bit0, PipelineVertexInputStateCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineVertexInputStateCreateFlags operator|( PipelineVertexInputStateCreateFlagBits bit0, PipelineVertexInputStateCreateFlagBits bit1 )
   {
     return PipelineVertexInputStateCreateFlags( bit0 ) | bit1;
   }
@@ -652,7 +686,7 @@ namespace vk
 
   using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
 
-  inline PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 )
   {
     return PipelineShaderStageCreateFlags( bit0 ) | bit1;
   }
@@ -663,7 +697,7 @@ namespace vk
 
   using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
 
-  inline DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 )
   {
     return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
   }
@@ -674,7 +708,7 @@ namespace vk
 
   using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
 
-  inline BufferViewCreateFlags operator|( BufferViewCreateFlagBits bit0, BufferViewCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE BufferViewCreateFlags operator|( BufferViewCreateFlagBits bit0, BufferViewCreateFlagBits bit1 )
   {
     return BufferViewCreateFlags( bit0 ) | bit1;
   }
@@ -685,7 +719,7 @@ namespace vk
 
   using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
 
-  inline InstanceCreateFlags operator|( InstanceCreateFlagBits bit0, InstanceCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE InstanceCreateFlags operator|( InstanceCreateFlagBits bit0, InstanceCreateFlagBits bit1 )
   {
     return InstanceCreateFlags( bit0 ) | bit1;
   }
@@ -696,7 +730,7 @@ namespace vk
 
   using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
 
-  inline DeviceCreateFlags operator|( DeviceCreateFlagBits bit0, DeviceCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE DeviceCreateFlags operator|( DeviceCreateFlagBits bit0, DeviceCreateFlagBits bit1 )
   {
     return DeviceCreateFlags( bit0 ) | bit1;
   }
@@ -707,7 +741,7 @@ namespace vk
 
   using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
 
-  inline DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 )
   {
     return DeviceQueueCreateFlags( bit0 ) | bit1;
   }
@@ -718,7 +752,7 @@ namespace vk
 
   using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
 
-  inline ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 )
   {
     return ImageViewCreateFlags( bit0 ) | bit1;
   }
@@ -729,7 +763,7 @@ namespace vk
 
   using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
 
-  inline SemaphoreCreateFlags operator|( SemaphoreCreateFlagBits bit0, SemaphoreCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE SemaphoreCreateFlags operator|( SemaphoreCreateFlagBits bit0, SemaphoreCreateFlagBits bit1 )
   {
     return SemaphoreCreateFlags( bit0 ) | bit1;
   }
@@ -740,7 +774,7 @@ namespace vk
 
   using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
 
-  inline ShaderModuleCreateFlags operator|( ShaderModuleCreateFlagBits bit0, ShaderModuleCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE ShaderModuleCreateFlags operator|( ShaderModuleCreateFlagBits bit0, ShaderModuleCreateFlagBits bit1 )
   {
     return ShaderModuleCreateFlags( bit0 ) | bit1;
   }
@@ -751,7 +785,7 @@ namespace vk
 
   using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
 
-  inline EventCreateFlags operator|( EventCreateFlagBits bit0, EventCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE EventCreateFlags operator|( EventCreateFlagBits bit0, EventCreateFlagBits bit1 )
   {
     return EventCreateFlags( bit0 ) | bit1;
   }
@@ -762,7 +796,7 @@ namespace vk
 
   using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;
 
-  inline MemoryMapFlags operator|( MemoryMapFlagBits bit0, MemoryMapFlagBits bit1 )
+  VULKAN_HPP_INLINE MemoryMapFlags operator|( MemoryMapFlagBits bit0, MemoryMapFlagBits bit1 )
   {
     return MemoryMapFlags( bit0 ) | bit1;
   }
@@ -773,7 +807,7 @@ namespace vk
 
   using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;
 
-  inline SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 )
+  VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 )
   {
     return SubpassDescriptionFlags( bit0 ) | bit1;
   }
@@ -784,7 +818,7 @@ namespace vk
 
   using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
 
-  inline DescriptorPoolResetFlags operator|( DescriptorPoolResetFlagBits bit0, DescriptorPoolResetFlagBits bit1 )
+  VULKAN_HPP_INLINE DescriptorPoolResetFlags operator|( DescriptorPoolResetFlagBits bit0, DescriptorPoolResetFlagBits bit1 )
   {
     return DescriptorPoolResetFlags( bit0 ) | bit1;
   }
@@ -795,7 +829,7 @@ namespace vk
 
   using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;
 
-  inline SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 )
+  VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 )
   {
     return SwapchainCreateFlagsKHR( bit0 ) | bit1;
   }
@@ -806,7 +840,7 @@ namespace vk
 
   using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
 
-  inline DisplayModeCreateFlagsKHR operator|( DisplayModeCreateFlagBitsKHR bit0, DisplayModeCreateFlagBitsKHR bit1 )
+  VULKAN_HPP_INLINE DisplayModeCreateFlagsKHR operator|( DisplayModeCreateFlagBitsKHR bit0, DisplayModeCreateFlagBitsKHR bit1 )
   {
     return DisplayModeCreateFlagsKHR( bit0 ) | bit1;
   }
@@ -817,7 +851,7 @@ namespace vk
 
   using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
 
-  inline DisplaySurfaceCreateFlagsKHR operator|( DisplaySurfaceCreateFlagBitsKHR bit0, DisplaySurfaceCreateFlagBitsKHR bit1 )
+  VULKAN_HPP_INLINE DisplaySurfaceCreateFlagsKHR operator|( DisplaySurfaceCreateFlagBitsKHR bit0, DisplaySurfaceCreateFlagBitsKHR bit1 )
   {
     return DisplaySurfaceCreateFlagsKHR( bit0 ) | bit1;
   }
@@ -831,7 +865,7 @@ namespace vk
 #ifdef VK_USE_PLATFORM_ANDROID_KHR
   using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;
 
-  inline AndroidSurfaceCreateFlagsKHR operator|( AndroidSurfaceCreateFlagBitsKHR bit0, AndroidSurfaceCreateFlagBitsKHR bit1 )
+  VULKAN_HPP_INLINE AndroidSurfaceCreateFlagsKHR operator|( AndroidSurfaceCreateFlagBitsKHR bit0, AndroidSurfaceCreateFlagBitsKHR bit1 )
   {
     return AndroidSurfaceCreateFlagsKHR( bit0 ) | bit1;
   }
@@ -846,7 +880,7 @@ namespace vk
 #ifdef VK_USE_PLATFORM_MIR_KHR
   using MirSurfaceCreateFlagsKHR = Flags<MirSurfaceCreateFlagBitsKHR, VkMirSurfaceCreateFlagsKHR>;
 
-  inline MirSurfaceCreateFlagsKHR operator|( MirSurfaceCreateFlagBitsKHR bit0, MirSurfaceCreateFlagBitsKHR bit1 )
+  VULKAN_HPP_INLINE MirSurfaceCreateFlagsKHR operator|( MirSurfaceCreateFlagBitsKHR bit0, MirSurfaceCreateFlagBitsKHR bit1 )
   {
     return MirSurfaceCreateFlagsKHR( bit0 ) | bit1;
   }
@@ -861,7 +895,7 @@ namespace vk
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
   using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;
 
-  inline WaylandSurfaceCreateFlagsKHR operator|( WaylandSurfaceCreateFlagBitsKHR bit0, WaylandSurfaceCreateFlagBitsKHR bit1 )
+  VULKAN_HPP_INLINE WaylandSurfaceCreateFlagsKHR operator|( WaylandSurfaceCreateFlagBitsKHR bit0, WaylandSurfaceCreateFlagBitsKHR bit1 )
   {
     return WaylandSurfaceCreateFlagsKHR( bit0 ) | bit1;
   }
@@ -876,7 +910,7 @@ namespace vk
 #ifdef VK_USE_PLATFORM_WIN32_KHR
   using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;
 
-  inline Win32SurfaceCreateFlagsKHR operator|( Win32SurfaceCreateFlagBitsKHR bit0, Win32SurfaceCreateFlagBitsKHR bit1 )
+  VULKAN_HPP_INLINE Win32SurfaceCreateFlagsKHR operator|( Win32SurfaceCreateFlagBitsKHR bit0, Win32SurfaceCreateFlagBitsKHR bit1 )
   {
     return Win32SurfaceCreateFlagsKHR( bit0 ) | bit1;
   }
@@ -891,7 +925,7 @@ namespace vk
 #ifdef VK_USE_PLATFORM_XLIB_KHR
   using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;
 
-  inline XlibSurfaceCreateFlagsKHR operator|( XlibSurfaceCreateFlagBitsKHR bit0, XlibSurfaceCreateFlagBitsKHR bit1 )
+  VULKAN_HPP_INLINE XlibSurfaceCreateFlagsKHR operator|( XlibSurfaceCreateFlagBitsKHR bit0, XlibSurfaceCreateFlagBitsKHR bit1 )
   {
     return XlibSurfaceCreateFlagsKHR( bit0 ) | bit1;
   }
@@ -906,7 +940,7 @@ namespace vk
 #ifdef VK_USE_PLATFORM_XCB_KHR
   using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;
 
-  inline XcbSurfaceCreateFlagsKHR operator|( XcbSurfaceCreateFlagBitsKHR bit0, XcbSurfaceCreateFlagBitsKHR bit1 )
+  VULKAN_HPP_INLINE XcbSurfaceCreateFlagsKHR operator|( XcbSurfaceCreateFlagBitsKHR bit0, XcbSurfaceCreateFlagBitsKHR bit1 )
   {
     return XcbSurfaceCreateFlagsKHR( bit0 ) | bit1;
   }
@@ -2052,6 +2086,120 @@ namespace vk
   };
   static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
 
+  class ObjectTableNVX
+  {
+  public:
+    ObjectTableNVX()
+      : m_objectTableNVX(VK_NULL_HANDLE)
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ObjectTableNVX(VkObjectTableNVX objectTableNVX)
+       : m_objectTableNVX(objectTableNVX)
+    {}
+
+    ObjectTableNVX& operator=(VkObjectTableNVX objectTableNVX)
+    {
+      m_objectTableNVX = objectTableNVX;
+      return *this;
+    }
+#endif
+
+    bool operator==(ObjectTableNVX const &rhs) const
+    {
+      return m_objectTableNVX == rhs.m_objectTableNVX;
+    }
+
+    bool operator!=(ObjectTableNVX const &rhs) const
+    {
+      return m_objectTableNVX != rhs.m_objectTableNVX;
+    }
+
+    bool operator<(ObjectTableNVX const &rhs) const
+    {
+      return m_objectTableNVX < rhs.m_objectTableNVX;
+    }
+
+#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    explicit
+#endif
+    operator VkObjectTableNVX() const
+    {
+      return m_objectTableNVX;
+    }
+
+    explicit operator bool() const
+    {
+      return m_objectTableNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_objectTableNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkObjectTableNVX m_objectTableNVX;
+  };
+  static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
+
+  class IndirectCommandsLayoutNVX
+  {
+  public:
+    IndirectCommandsLayoutNVX()
+      : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    IndirectCommandsLayoutNVX(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
+       : m_indirectCommandsLayoutNVX(indirectCommandsLayoutNVX)
+    {}
+
+    IndirectCommandsLayoutNVX& operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
+    {
+      m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
+      return *this;
+    }
+#endif
+
+    bool operator==(IndirectCommandsLayoutNVX const &rhs) const
+    {
+      return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
+    }
+
+    bool operator!=(IndirectCommandsLayoutNVX const &rhs) const
+    {
+      return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
+    }
+
+    bool operator<(IndirectCommandsLayoutNVX const &rhs) const
+    {
+      return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
+    }
+
+#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    explicit
+#endif
+    operator VkIndirectCommandsLayoutNVX() const
+    {
+      return m_indirectCommandsLayoutNVX;
+    }
+
+    explicit operator bool() const
+    {
+      return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
+  };
+  static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
+
   class DisplayKHR
   {
   public:
@@ -4758,11 +4906,24 @@ namespace vk
 
   using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
 
-  inline CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
+  VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
   {
     return CullModeFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits )
+  {
+    return ~( CullModeFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CullModeFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
+    };
+  };
+
   enum class FrontFace
   {
     eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
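
Note: the FlagTraits specialization above is what gives the new operator~ a meaningful result for CullModeFlagBits. A small sketch:

    // Sketch only: the complement stays limited to the bits the type defines
    // (allFlags), so ~eFront reduces to just eBack for this enum.
    vk::CullModeFlags notFront = ~vk::CullModeFlagBits::eFront;                  // == eBack
    bool frontCleared = !( notFront & vk::CullModeFlagBits::eFront );            // true
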
@@ -5362,7 +5523,13 @@ namespace vk
     eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
     eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
     eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
-    eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT
+    eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
+    eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
+    eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
+    eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
+    eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
+    eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
+    eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX
   };
 
   struct ApplicationInfo
@@ -8938,103 +9105,153 @@ namespace vk
   static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-  enum class SubpassContents
-  {
-    eInline = VK_SUBPASS_CONTENTS_INLINE,
-    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
-  };
-
-  struct PresentInfoKHR
+  struct DeviceGeneratedCommandsFeaturesNVX
   {
-    PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t swapchainCount_ = 0, const SwapchainKHR* pSwapchains_ = nullptr, const uint32_t* pImageIndices_ = nullptr, Result* pResults_ = nullptr )
-      : sType( StructureType::ePresentInfoKHR )
+    DeviceGeneratedCommandsFeaturesNVX( Bool32 computeBindingPointSupport_ = 0 )
+      : sType( StructureType::eDeviceGeneratedCommandsFeaturesNVX )
       , pNext( nullptr )
-      , waitSemaphoreCount( waitSemaphoreCount_ )
-      , pWaitSemaphores( pWaitSemaphores_ )
-      , swapchainCount( swapchainCount_ )
-      , pSwapchains( pSwapchains_ )
-      , pImageIndices( pImageIndices_ )
-      , pResults( pResults_ )
+      , computeBindingPointSupport( computeBindingPointSupport_ )
     {
     }
 
-    PresentInfoKHR( VkPresentInfoKHR const & rhs )
+    DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
     {
-      memcpy( this, &rhs, sizeof(PresentInfoKHR) );
+      memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsFeaturesNVX) );
     }
 
-    PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs )
+    DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
     {
-      memcpy( this, &rhs, sizeof(PresentInfoKHR) );
+      memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsFeaturesNVX) );
       return *this;
     }
 
-    PresentInfoKHR& setSType( StructureType sType_ )
+    DeviceGeneratedCommandsFeaturesNVX& setSType( StructureType sType_ )
     {
       sType = sType_;
       return *this;
     }
 
-    PresentInfoKHR& setPNext( const void* pNext_ )
+    DeviceGeneratedCommandsFeaturesNVX& setPNext( const void* pNext_ )
     {
       pNext = pNext_;
       return *this;
     }
 
-    PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+    DeviceGeneratedCommandsFeaturesNVX& setComputeBindingPointSupport( Bool32 computeBindingPointSupport_ )
     {
-      waitSemaphoreCount = waitSemaphoreCount_;
+      computeBindingPointSupport = computeBindingPointSupport_;
       return *this;
     }
 
-    PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+    operator const VkDeviceGeneratedCommandsFeaturesNVX&() const
     {
-      pWaitSemaphores = pWaitSemaphores_;
+      return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>(this);
+    }
+
+    bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( computeBindingPointSupport == rhs.computeBindingPointSupport );
+    }
+
+    bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Bool32 computeBindingPointSupport;
+  };
+  static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
+
+  struct DeviceGeneratedCommandsLimitsNVX
+  {
+    DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, uint32_t maxObjectEntryCounts_ = 0, uint32_t minSequenceCountBufferOffsetAlignment_ = 0, uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 )
+      : sType( StructureType::eDeviceGeneratedCommandsLimitsNVX )
+      , pNext( nullptr )
+      , maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
+      , maxObjectEntryCounts( maxObjectEntryCounts_ )
+      , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
+      , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
+      , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
+    {
+    }
+
+    DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsLimitsNVX) );
+    }
+
+    DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsLimitsNVX) );
       return *this;
     }
 
-    PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
+    DeviceGeneratedCommandsLimitsNVX& setSType( StructureType sType_ )
     {
-      swapchainCount = swapchainCount_;
+      sType = sType_;
       return *this;
     }
 
-    PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ )
+    DeviceGeneratedCommandsLimitsNVX& setPNext( const void* pNext_ )
     {
-      pSwapchains = pSwapchains_;
+      pNext = pNext_;
       return *this;
     }
 
-    PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ )
+    DeviceGeneratedCommandsLimitsNVX& setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ )
     {
-      pImageIndices = pImageIndices_;
+      maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
       return *this;
     }
 
-    PresentInfoKHR& setPResults( Result* pResults_ )
+    DeviceGeneratedCommandsLimitsNVX& setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ )
     {
-      pResults = pResults_;
+      maxObjectEntryCounts = maxObjectEntryCounts_;
       return *this;
     }
 
-    operator const VkPresentInfoKHR&() const
+    DeviceGeneratedCommandsLimitsNVX& setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ )
     {
-      return *reinterpret_cast<const VkPresentInfoKHR*>(this);
+      minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
+      return *this;
     }
 
-    bool operator==( PresentInfoKHR const& rhs ) const
+    DeviceGeneratedCommandsLimitsNVX& setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ )
+    {
+      minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX& setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ )
+    {
+      minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
+      return *this;
+    }
+
+    operator const VkDeviceGeneratedCommandsLimitsNVX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>(this);
+    }
+
+    bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
-          && ( pWaitSemaphores == rhs.pWaitSemaphores )
-          && ( swapchainCount == rhs.swapchainCount )
-          && ( pSwapchains == rhs.pSwapchains )
-          && ( pImageIndices == rhs.pImageIndices )
-          && ( pResults == rhs.pResults );
+          && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
+          && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
+          && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
+          && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
+          && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
     }
 
-    bool operator!=( PresentInfoKHR const& rhs ) const
+    bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
     {
       return !operator==( rhs );
     }
@@ -9044,95 +9261,81 @@ namespace vk
 
   public:
     const void* pNext;
-    uint32_t waitSemaphoreCount;
-    const Semaphore* pWaitSemaphores;
-    uint32_t swapchainCount;
-    const SwapchainKHR* pSwapchains;
-    const uint32_t* pImageIndices;
-    Result* pResults;
-  };
-  static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
-
-  enum class DynamicState
-  {
-    eViewport = VK_DYNAMIC_STATE_VIEWPORT,
-    eScissor = VK_DYNAMIC_STATE_SCISSOR,
-    eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
-    eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
-    eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
-    eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
-    eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
-    eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
-    eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE
+    uint32_t maxIndirectCommandsLayoutTokenCount;
+    uint32_t maxObjectEntryCounts;
+    uint32_t minSequenceCountBufferOffsetAlignment;
+    uint32_t minSequenceIndexBufferOffsetAlignment;
+    uint32_t minCommandsTokenBufferOffsetAlignment;
   };
+  static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
 
-  struct PipelineDynamicStateCreateInfo
+  struct CmdReserveSpaceForCommandsInfoNVX
   {
-    PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), uint32_t dynamicStateCount_ = 0, const DynamicState* pDynamicStates_ = nullptr )
-      : sType( StructureType::ePipelineDynamicStateCreateInfo )
+    CmdReserveSpaceForCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t maxSequencesCount_ = 0 )
+      : sType( StructureType::eCmdReserveSpaceForCommandsInfoNVX )
       , pNext( nullptr )
-      , flags( flags_ )
-      , dynamicStateCount( dynamicStateCount_ )
-      , pDynamicStates( pDynamicStates_ )
+      , objectTable( objectTable_ )
+      , indirectCommandsLayout( indirectCommandsLayout_ )
+      , maxSequencesCount( maxSequencesCount_ )
     {
     }
 
-    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs )
+    CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
     {
-      memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
+      memcpy( this, &rhs, sizeof(CmdReserveSpaceForCommandsInfoNVX) );
     }
 
-    PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs )
+    CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
     {
-      memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
+      memcpy( this, &rhs, sizeof(CmdReserveSpaceForCommandsInfoNVX) );
       return *this;
     }
 
-    PipelineDynamicStateCreateInfo& setSType( StructureType sType_ )
+    CmdReserveSpaceForCommandsInfoNVX& setSType( StructureType sType_ )
     {
       sType = sType_;
       return *this;
     }
 
-    PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ )
+    CmdReserveSpaceForCommandsInfoNVX& setPNext( const void* pNext_ )
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ )
+    CmdReserveSpaceForCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
     {
-      flags = flags_;
+      objectTable = objectTable_;
       return *this;
     }
 
-    PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ )
+    CmdReserveSpaceForCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
     {
-      dynamicStateCount = dynamicStateCount_;
+      indirectCommandsLayout = indirectCommandsLayout_;
       return *this;
     }
 
-    PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ )
+    CmdReserveSpaceForCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
     {
-      pDynamicStates = pDynamicStates_;
+      maxSequencesCount = maxSequencesCount_;
       return *this;
     }
 
-    operator const VkPipelineDynamicStateCreateInfo&() const
+    operator const VkCmdReserveSpaceForCommandsInfoNVX&() const
     {
-      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>(this);
+      return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>(this);
     }
 
-    bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const
+    bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( dynamicStateCount == rhs.dynamicStateCount )
-          && ( pDynamicStates == rhs.pDynamicStates );
+          && ( objectTable == rhs.objectTable )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( maxSequencesCount == rhs.maxSequencesCount );
     }
 
-    bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const
+    bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
     {
       return !operator==( rhs );
     }
@@ -9142,36 +9345,259 @@ namespace vk
 
   public:
     const void* pNext;
-    PipelineDynamicStateCreateFlags flags;
-    uint32_t dynamicStateCount;
-    const DynamicState* pDynamicStates;
+    ObjectTableNVX objectTable;
+    IndirectCommandsLayoutNVX indirectCommandsLayout;
+    uint32_t maxSequencesCount;
   };
-  static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
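
// Example (sketch, not part of the generated header): building the reservation info for
// the NVX device-generated-commands extension with the setters defined above. The object
// table and indirect commands layout handles are placeholders assumed to have been created
// earlier through the corresponding NVX create calls.
vk::ObjectTableNVX objectTable;                         // assumed valid handle
vk::IndirectCommandsLayoutNVX indirectCommandsLayout;   // assumed valid handle
vk::CmdReserveSpaceForCommandsInfoNVX reserveInfo;
reserveInfo.setObjectTable( objectTable )
           .setIndirectCommandsLayout( indirectCommandsLayout )
           .setMaxSequencesCount( 128 );
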
 
-  enum class QueueFlagBits
+  enum class SubpassContents
   {
-    eGraphics = VK_QUEUE_GRAPHICS_BIT,
-    eCompute = VK_QUEUE_COMPUTE_BIT,
-    eTransfer = VK_QUEUE_TRANSFER_BIT,
-    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT
+    eInline = VK_SUBPASS_CONTENTS_INLINE,
+    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
   };
 
-  using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
-
-  inline QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 )
-  {
-    return QueueFlags( bit0 ) | bit1;
-  }
-
-  struct QueueFamilyProperties
+  struct PresentInfoKHR
   {
-    operator const VkQueueFamilyProperties&() const
-    {
-      return *reinterpret_cast<const VkQueueFamilyProperties*>(this);
-    }
-
-    bool operator==( QueueFamilyProperties const& rhs ) const
-    {
+    PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t swapchainCount_ = 0, const SwapchainKHR* pSwapchains_ = nullptr, const uint32_t* pImageIndices_ = nullptr, Result* pResults_ = nullptr )
+      : sType( StructureType::ePresentInfoKHR )
+      , pNext( nullptr )
+      , waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , swapchainCount( swapchainCount_ )
+      , pSwapchains( pSwapchains_ )
+      , pImageIndices( pImageIndices_ )
+      , pResults( pResults_ )
+    {
+    }
+
+    PresentInfoKHR( VkPresentInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(PresentInfoKHR) );
+    }
+
+    PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(PresentInfoKHR) );
+      return *this;
+    }
+
+    PresentInfoKHR& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    PresentInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+    PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ )
+    {
+      pSwapchains = pSwapchains_;
+      return *this;
+    }
+
+    PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ )
+    {
+      pImageIndices = pImageIndices_;
+      return *this;
+    }
+
+    PresentInfoKHR& setPResults( Result* pResults_ )
+    {
+      pResults = pResults_;
+      return *this;
+    }
+
+    operator const VkPresentInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkPresentInfoKHR*>(this);
+    }
+
+    bool operator==( PresentInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pSwapchains == rhs.pSwapchains )
+          && ( pImageIndices == rhs.pImageIndices )
+          && ( pResults == rhs.pResults );
+    }
+
+    bool operator!=( PresentInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t waitSemaphoreCount;
+    const Semaphore* pWaitSemaphores;
+    uint32_t swapchainCount;
+    const SwapchainKHR* pSwapchains;
+    const uint32_t* pImageIndices;
+    Result* pResults;
+  };
+  static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
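
// Example (sketch): filling the PresentInfoKHR wrapper defined above before a queue
// present. The semaphore, swapchain handle and image index are placeholders assumed to
// come from the application's frame loop (e.g. from acquireNextImageKHR).
vk::Semaphore renderFinished;     // assumed valid handle
vk::SwapchainKHR swapchain;       // assumed valid handle
uint32_t imageIndex = 0;          // assumed index of the acquired swapchain image
vk::PresentInfoKHR presentInfo( 1, &renderFinished,   // wait on rendering completion
                                1, &swapchain,        // single swapchain
                                &imageIndex );        // image to present
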
+
+  enum class DynamicState
+  {
+    eViewport = VK_DYNAMIC_STATE_VIEWPORT,
+    eScissor = VK_DYNAMIC_STATE_SCISSOR,
+    eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
+    eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
+    eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
+    eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
+    eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
+    eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
+    eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE
+  };
+
+  struct PipelineDynamicStateCreateInfo
+  {
+    PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), uint32_t dynamicStateCount_ = 0, const DynamicState* pDynamicStates_ = nullptr )
+      : sType( StructureType::ePipelineDynamicStateCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , dynamicStateCount( dynamicStateCount_ )
+      , pDynamicStates( pDynamicStates_ )
+    {
+    }
+
+    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
+    }
+
+    PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ )
+    {
+      dynamicStateCount = dynamicStateCount_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ )
+    {
+      pDynamicStates = pDynamicStates_;
+      return *this;
+    }
+
+    operator const VkPipelineDynamicStateCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dynamicStateCount == rhs.dynamicStateCount )
+          && ( pDynamicStates == rhs.pDynamicStates );
+    }
+
+    bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineDynamicStateCreateFlags flags;
+    uint32_t dynamicStateCount;
+    const DynamicState* pDynamicStates;
+  };
+  static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
+
+  enum class QueueFlagBits
+  {
+    eGraphics = VK_QUEUE_GRAPHICS_BIT,
+    eCompute = VK_QUEUE_COMPUTE_BIT,
+    eTransfer = VK_QUEUE_TRANSFER_BIT,
+    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT
+  };
+
+  using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
+
+  VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 )
+  {
+    return QueueFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits )
+  {
+    return ~( QueueFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<QueueFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding)
+    };
+  };
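
// Example (sketch): the operator|, the new operator~ and the FlagTraits specialization
// above allow queue capability masks to be composed and complemented type-safely. This
// assumes the Flags<> complement masks against FlagTraits<QueueFlagBits>::allFlags, which
// is what the specialization added here provides.
vk::QueueFlags graphicsOrCompute = vk::QueueFlagBits::eGraphics | vk::QueueFlagBits::eCompute;
vk::QueueFlags notSparse         = ~vk::QueueFlagBits::eSparseBinding;  // every defined bit except sparse binding
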
+
+  struct QueueFamilyProperties
+  {
+    operator const VkQueueFamilyProperties&() const
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties*>(this);
+    }
+
+    bool operator==( QueueFamilyProperties const& rhs ) const
+    {
       return ( queueFlags == rhs.queueFlags )
           && ( queueCount == rhs.queueCount )
           && ( timestampValidBits == rhs.timestampValidBits )
@@ -9201,11 +9627,24 @@ namespace vk
 
   using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
 
-  inline MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 )
+  VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 )
   {
     return MemoryPropertyFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
+  {
+    return ~( MemoryPropertyFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<MemoryPropertyFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated)
+    };
+  };
+
   struct MemoryType
   {
     operator const VkMemoryType&() const
@@ -9236,11 +9675,24 @@ namespace vk
 
   using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
 
-  inline MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 )
+  VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 )
   {
     return MemoryHeapFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits )
+  {
+    return ~( MemoryHeapFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<MemoryHeapFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal)
+    };
+  };
+
   struct MemoryHeap
   {
     operator const VkMemoryHeap&() const
@@ -9309,16 +9761,31 @@ namespace vk
     eHostRead = VK_ACCESS_HOST_READ_BIT,
     eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
     eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
-    eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT
+    eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
+    eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
+    eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX
   };
 
   using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
 
-  inline AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 )
+  VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 )
   {
     return AccessFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits )
+  {
+    return ~( AccessFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<AccessFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX)
+    };
+  };
+
   struct MemoryBarrier
   {
     MemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags() )
@@ -9525,25 +9992,51 @@ namespace vk
 
   using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
 
-  inline BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 )
+  VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 )
   {
     return BufferUsageFlags( bit0 ) | bit1;
   }
 
-  enum class BufferCreateFlagBits
+  VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits )
   {
-    eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
-    eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+    return ~( BufferUsageFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<BufferUsageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer)
+    };
+  };
+
+  enum class BufferCreateFlagBits
+  {
+    eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
     eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT
   };
 
   using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
 
-  inline BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 )
   {
     return BufferCreateFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits )
+  {
+    return ~( BufferCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<BufferCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased)
+    };
+  };
+
   struct BufferCreateInfo
   {
     BufferCreateInfo( BufferCreateFlags flags_ = BufferCreateFlags(), DeviceSize size_ = 0, BufferUsageFlags usage_ = BufferUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr )
@@ -9667,11 +10160,24 @@ namespace vk
 
   using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
 
-  inline ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 )
+  VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 )
   {
     return ShaderStageFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits )
+  {
+    return ~( ShaderStageFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<ShaderStageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll)
+    };
+  };
+
   struct DescriptorSetLayoutBinding
   {
     DescriptorSetLayoutBinding( uint32_t binding_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0, ShaderStageFlags stageFlags_ = ShaderStageFlags(), const Sampler* pImmutableSamplers_ = nullptr )
@@ -10108,11 +10614,24 @@ namespace vk
 
   using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
 
-  inline ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 )
+  VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 )
   {
     return ImageUsageFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits )
+  {
+    return ~( ImageUsageFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<ImageUsageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment)
+    };
+  };
+
   enum class ImageCreateFlagBits
   {
     eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
@@ -10124,11 +10643,24 @@ namespace vk
 
   using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
 
-  inline ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 )
   {
     return ImageCreateFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits )
+  {
+    return ~( ImageCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<ImageCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible)
+    };
+  };
+
   enum class PipelineCreateFlagBits
   {
     eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
@@ -10138,11 +10670,24 @@ namespace vk
 
   using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
 
-  inline PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 )
   {
     return PipelineCreateFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits )
+  {
+    return ~( PipelineCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<PipelineCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative)
+    };
+  };
+
   struct ComputePipelineCreateInfo
   {
     ComputePipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), PipelineShaderStageCreateInfo stage_ = PipelineShaderStageCreateInfo(), PipelineLayout layout_ = PipelineLayout(), Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
@@ -10253,11 +10798,24 @@ namespace vk
 
   using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
 
-  inline ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 )
+  VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 )
   {
     return ColorComponentFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits )
+  {
+    return ~( ColorComponentFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<ColorComponentFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
+    };
+  };
+
   struct PipelineColorBlendAttachmentState
   {
     PipelineColorBlendAttachmentState( Bool32 blendEnable_ = 0, BlendFactor srcColorBlendFactor_ = BlendFactor::eZero, BlendFactor dstColorBlendFactor_ = BlendFactor::eZero, BlendOp colorBlendOp_ = BlendOp::eAdd, BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero, BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero, BlendOp alphaBlendOp_ = BlendOp::eAdd, ColorComponentFlags colorWriteMask_ = ColorComponentFlags() )
@@ -10480,11 +11038,24 @@ namespace vk
 
   using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
 
-  inline FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
   {
     return FenceCreateFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits )
+  {
+    return ~( FenceCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<FenceCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
+    };
+  };
+
   struct FenceCreateInfo
   {
     FenceCreateInfo( FenceCreateFlags flags_ = FenceCreateFlags() )
@@ -10569,11 +11140,24 @@ namespace vk
 
   using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
 
-  inline FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 )
+  VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 )
   {
     return FormatFeatureFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits )
+  {
+    return ~( FormatFeatureFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<FormatFeatureFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG)
+    };
+  };
+
   struct FormatProperties
   {
     operator const VkFormatProperties&() const
@@ -10606,11 +11190,24 @@ namespace vk
 
   using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
 
-  inline QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 )
+  VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 )
   {
     return QueryControlFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits )
+  {
+    return ~( QueryControlFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<QueryControlFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueryControlFlagBits::ePrecise)
+    };
+  };
+
   enum class QueryResultFlagBits
   {
     e64 = VK_QUERY_RESULT_64_BIT,
@@ -10621,11 +11218,24 @@ namespace vk
 
   using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
 
-  inline QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 )
+  VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 )
   {
     return QueryResultFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits )
+  {
+    return ~( QueryResultFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<QueryResultFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
+    };
+  };
+
   enum class CommandBufferUsageFlagBits
   {
     eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
@@ -10635,11 +11245,24 @@ namespace vk
 
   using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
 
-  inline CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 )
+  VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 )
   {
     return CommandBufferUsageFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits )
+  {
+    return ~( CommandBufferUsageFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CommandBufferUsageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
+    };
+  };
+
   enum class QueryPipelineStatisticFlagBits
   {
     eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
@@ -10657,11 +11280,24 @@ namespace vk
 
   using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
 
-  inline QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 )
+  VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 )
   {
     return QueryPipelineStatisticFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
+  {
+    return ~( QueryPipelineStatisticFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
+    };
+  };
+
   struct CommandBufferInheritanceInfo
   {
     CommandBufferInheritanceInfo( RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Framebuffer framebuffer_ = Framebuffer(), Bool32 occlusionQueryEnable_ = 0, QueryControlFlags queryFlags_ = QueryControlFlags(), QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
@@ -10945,11 +11581,24 @@ namespace vk
 
   using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
 
-  inline ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 )
+  VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 )
   {
     return ImageAspectFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits )
+  {
+    return ~( ImageAspectFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<ImageAspectFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata)
+    };
+  };
+
   struct ImageSubresource
   {
     ImageSubresource( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t arrayLayer_ = 0 )
@@ -11770,11 +12419,24 @@ namespace vk
 
   using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
 
-  inline SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 )
+  VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 )
   {
     return SparseImageFormatFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
+  {
+    return ~( SparseImageFormatFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<SparseImageFormatFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
+    };
+  };
+
   struct SparseImageFormatProperties
   {
     operator const VkSparseImageFormatProperties&() const
@@ -11836,11 +12498,24 @@ namespace vk
 
   using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
 
-  inline SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 )
+  VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 )
   {
     return SparseMemoryBindFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
+  {
+    return ~( SparseMemoryBindFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<SparseMemoryBindFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
+    };
+  };
+
   struct SparseMemoryBind
   {
     SparseMemoryBind( DeviceSize resourceOffset_ = 0, DeviceSize size_ = 0, DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
@@ -12354,16 +13029,30 @@ namespace vk
     eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
     eHost = VK_PIPELINE_STAGE_HOST_BIT,
     eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
-    eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT
+    eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+    eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX
   };
 
   using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
 
-  inline PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 )
+  VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 )
   {
     return PipelineStageFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits )
+  {
+    return ~( PipelineStageFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<PipelineStageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX)
+    };
+  };
+
   enum class CommandPoolCreateFlagBits
   {
     eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
@@ -12372,11 +13061,24 @@ namespace vk
 
   using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
 
-  inline CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 )
   {
     return CommandPoolCreateFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits )
+  {
+    return ~( CommandPoolCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CommandPoolCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer)
+    };
+  };
+
   struct CommandPoolCreateInfo
   {
     CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = CommandPoolCreateFlags(), uint32_t queueFamilyIndex_ = 0 )
@@ -12457,11 +13159,24 @@ namespace vk
 
   using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
 
-  inline CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 )
+  VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 )
   {
     return CommandPoolResetFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits )
+  {
+    return ~( CommandPoolResetFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CommandPoolResetFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
+    };
+  };
+
   enum class CommandBufferResetFlagBits
   {
     eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
@@ -12469,11 +13184,24 @@ namespace vk
 
   using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
 
-  inline CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 )
+  VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 )
   {
     return CommandBufferResetFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits )
+  {
+    return ~( CommandBufferResetFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CommandBufferResetFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
+    };
+  };
+
   enum class SampleCountFlagBits
   {
     e1 = VK_SAMPLE_COUNT_1_BIT,
@@ -12487,11 +13215,24 @@ namespace vk
 
   using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
 
-  inline SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 )
+  VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 )
   {
     return SampleCountFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits )
+  {
+    return ~( SampleCountFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<SampleCountFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
+    };
+  };
+
   struct ImageFormatProperties
   {
     operator const VkImageFormatProperties&() const
@@ -13294,11 +14035,24 @@ namespace vk
 
   using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
 
-  inline AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 )
+  VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 )
   {
     return AttachmentDescriptionFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits )
+  {
+    return ~( AttachmentDescriptionFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<AttachmentDescriptionFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
+    };
+  };
+
   struct AttachmentDescription
   {
     AttachmentDescription( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(), Format format_ = Format::eUndefined, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, ImageLayout initialLayout_ = ImageLayout::eUndefined, ImageLayout finalLayout_ = ImageLayout::eUndefined )
@@ -13423,11 +14177,24 @@ namespace vk
 
   using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
 
-  inline StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 )
+  VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 )
   {
     return StencilFaceFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits )
+  {
+    return ~( StencilFaceFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<StencilFaceFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eVkStencilFrontAndBack)
+    };
+  };
+
   enum class DescriptorPoolCreateFlagBits
   {
     eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
@@ -13435,11 +14202,24 @@ namespace vk
 
   using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
 
-  inline DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 )
+  VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 )
   {
     return DescriptorPoolCreateFlags( bit0 ) | bit1;
   }
 
+  VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits )
+  {
+    return ~( DescriptorPoolCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet)
+    };
+  };
+
   struct DescriptorPoolCreateInfo
   {
     DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = DescriptorPoolCreateFlags(), uint32_t maxSets_ = 0, uint32_t poolSizeCount_ = 0, const DescriptorPoolSize* pPoolSizes_ = nullptr )
@@ -13538,2030 +14318,3314 @@ namespace vk
 
   using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
 
-  inline DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
+  VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
   {
     return DependencyFlags( bit0 ) | bit1;
   }
 
-  class CommandBuffer
+  VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits )
   {
-  public:
-    CommandBuffer()
-      : m_commandBuffer(VK_NULL_HANDLE)
-    {}
+    return ~( DependencyFlags( bits ) );
+  }
 
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    CommandBuffer(VkCommandBuffer commandBuffer)
-       : m_commandBuffer(commandBuffer)
-    {}
+  template <> struct FlagTraits<DependencyFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(DependencyFlagBits::eByRegion)
+    };
+  };
 
-    CommandBuffer& operator=(VkCommandBuffer commandBuffer)
+  struct SubpassDependency
+  {
+    SubpassDependency( uint32_t srcSubpass_ = 0, uint32_t dstSubpass_ = 0, PipelineStageFlags srcStageMask_ = PipelineStageFlags(), PipelineStageFlags dstStageMask_ = PipelineStageFlags(), AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), DependencyFlags dependencyFlags_ = DependencyFlags() )
+      : srcSubpass( srcSubpass_ )
+      , dstSubpass( dstSubpass_ )
+      , srcStageMask( srcStageMask_ )
+      , dstStageMask( dstStageMask_ )
+      , srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , dependencyFlags( dependencyFlags_ )
     {
-      m_commandBuffer = commandBuffer;
-      return *this;
     }
-#endif
 
-    bool operator==(CommandBuffer const &rhs) const
+    SubpassDependency( VkSubpassDependency const & rhs )
     {
-      return m_commandBuffer == rhs.m_commandBuffer;
+      memcpy( this, &rhs, sizeof(SubpassDependency) );
     }
 
-    bool operator!=(CommandBuffer const &rhs) const
+    SubpassDependency& operator=( VkSubpassDependency const & rhs )
     {
-      return m_commandBuffer != rhs.m_commandBuffer;
+      memcpy( this, &rhs, sizeof(SubpassDependency) );
+      return *this;
     }
 
-    bool operator<(CommandBuffer const &rhs) const
+    SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ )
     {
-      return m_commandBuffer < rhs.m_commandBuffer;
+      srcSubpass = srcSubpass_;
+      return *this;
     }
 
-    Result begin( const CommandBufferBeginInfo* pBeginInfo ) const
+    SubpassDependency& setDstSubpass( uint32_t dstSubpass_ )
     {
-      return static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
+      dstSubpass = dstSubpass_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo ) const
+    SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ )
     {
-      Result result = static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
-      return createResultValue( result, "vk::CommandBuffer::begin" );
+      srcStageMask = srcStageMask_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    Result end(  ) const
+    SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ )
     {
-      return static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
+      dstStageMask = dstStageMask_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    ResultValueType<void>::type end() const
+    SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ )
     {
-      Result result = static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
-      return createResultValue( result, "vk::CommandBuffer::end" );
+      srcAccessMask = srcAccessMask_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    Result reset( CommandBufferResetFlags flags ) const
+    SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ )
     {
-      return static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+      dstAccessMask = dstAccessMask_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    ResultValueType<void>::type reset( CommandBufferResetFlags flags ) const
+    SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ )
     {
-      Result result = static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
-      return createResultValue( result, "vk::CommandBuffer::reset" );
+      dependencyFlags = dependencyFlags_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
+    operator const VkSubpassDependency&() const
     {
-      vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+      return *reinterpret_cast<const VkSubpassDependency*>(this);
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
+    bool operator==( SubpassDependency const& rhs ) const
     {
-      vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+      return ( srcSubpass == rhs.srcSubpass )
+          && ( dstSubpass == rhs.dstSubpass )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( dependencyFlags == rhs.dependencyFlags );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports ) const
+    bool operator!=( SubpassDependency const& rhs ) const
     {
-      vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
+      return !operator==( rhs );
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports ) const
+    uint32_t srcSubpass;
+    uint32_t dstSubpass;
+    PipelineStageFlags srcStageMask;
+    PipelineStageFlags dstStageMask;
+    AccessFlags srcAccessMask;
+    AccessFlags dstAccessMask;
+    DependencyFlags dependencyFlags;
+  };
+  static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
+
+  struct RenderPassCreateInfo
+  {
+    RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), uint32_t attachmentCount_ = 0, const AttachmentDescription* pAttachments_ = nullptr, uint32_t subpassCount_ = 0, const SubpassDescription* pSubpasses_ = nullptr, uint32_t dependencyCount_ = 0, const SubpassDependency* pDependencies_ = nullptr )
+      : sType( StructureType::eRenderPassCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , subpassCount( subpassCount_ )
+      , pSubpasses( pSubpasses_ )
+      , dependencyCount( dependencyCount_ )
+      , pDependencies( pDependencies_ )
     {
-      vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors ) const
+    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs )
     {
-      vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
+      memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors ) const
+    RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs )
     {
-      vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
+      memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setLineWidth( float lineWidth ) const
+    RenderPassCreateInfo& setSType( StructureType sType_ )
     {
-      vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+      sType = sType_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setLineWidth( float lineWidth ) const
+    RenderPassCreateInfo& setPNext( const void* pNext_ )
     {
-      vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+      pNext = pNext_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
+    RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ )
     {
-      vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+      flags = flags_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
+    RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
     {
-      vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+      attachmentCount = attachmentCount_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setBlendConstants( const float blendConstants[4] ) const
+    RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ )
     {
-      vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+      pAttachments = pAttachments_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setBlendConstants( const float blendConstants[4] ) const
+    RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ )
     {
-      vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+      subpassCount = subpassCount_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
+    RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ )
     {
-      vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+      pSubpasses = pSubpasses_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
+    RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
     {
-      vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+      dependencyCount = dependencyCount_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
+    RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ )
     {
-      vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+      pDependencies = pDependencies_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
+    operator const VkRenderPassCreateInfo&() const
     {
-      vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+      return *reinterpret_cast<const VkRenderPassCreateInfo*>(this);
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
+    bool operator==( RenderPassCreateInfo const& rhs ) const
     {
-      vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pSubpasses == rhs.pSubpasses )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pDependencies == rhs.pDependencies );
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
+    bool operator!=( RenderPassCreateInfo const& rhs ) const
     {
-      vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+      return !operator==( rhs );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    RenderPassCreateFlags flags;
+    uint32_t attachmentCount;
+    const AttachmentDescription* pAttachments;
+    uint32_t subpassCount;
+    const SubpassDescription* pSubpasses;
+    uint32_t dependencyCount;
+    const SubpassDependency* pDependencies;
+  };
+  static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
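
// Example (sketch): wiring the SubpassDependency and RenderPassCreateInfo wrappers defined
// above. Attachment and subpass descriptions are omitted (counts left at zero) to keep the
// illustration minimal; a real render pass would supply them as well.
vk::SubpassDependency dependency;
dependency.setSrcSubpass( VK_SUBPASS_EXTERNAL )
          .setDstSubpass( 0 )
          .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
          .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
          .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );
vk::RenderPassCreateInfo renderPassInfo;
renderPassInfo.setDependencyCount( 1 )
              .setPDependencies( &dependency );
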
+
+  enum class PresentModeKHR
+  {
+    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+    eFifo = VK_PRESENT_MODE_FIFO_KHR,
+    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR
+  };
+
+  enum class ColorSpaceKHR
+  {
+    eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR
+  };
+
+  struct SurfaceFormatKHR
+  {
+    operator const VkSurfaceFormatKHR&() const
     {
-      vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+      return *reinterpret_cast<const VkSurfaceFormatKHR*>(this);
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
+    bool operator==( SurfaceFormatKHR const& rhs ) const
     {
-      vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+      return ( format == rhs.format )
+          && ( colorSpace == rhs.colorSpace );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const
+    bool operator!=( SurfaceFormatKHR const& rhs ) const
     {
-      vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
+      return !operator==( rhs );
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets ) const
+    Format format;
+    ColorSpaceKHR colorSpace;
+  };
+  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+
+  enum class DisplayPlaneAlphaFlagBitsKHR
+  {
+    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+  };
+
+  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
+
+  VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 )
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits )
+  {
+    return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+  {
+    enum
     {
-      vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
-    }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+      allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
+    };
+  };
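
The operator| and operator~ overloads above give each FlagBits enum a type-safe bitmask type. A minimal usage sketch (not taken from this change, names are illustrative):

    // Combine individual bits into a DisplayPlaneAlphaFlagsKHR mask.
    vk::DisplayPlaneAlphaFlagsKHR alpha = vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque
                                        | vk::DisplayPlaneAlphaFlagBitsKHR::ePerPixel;
    // operator~ is masked by FlagTraits<...>::allFlags, so only defined bits are flipped.
    vk::DisplayPlaneAlphaFlagsKHR notOpaque = ~vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque;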
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
+  struct DisplayPlaneCapabilitiesKHR
+  {
+    operator const VkDisplayPlaneCapabilitiesKHR&() const
     {
-      vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
+      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>(this);
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
+    bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const
     {
-      vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
+      return ( supportedAlpha == rhs.supportedAlpha )
+          && ( minSrcPosition == rhs.minSrcPosition )
+          && ( maxSrcPosition == rhs.maxSrcPosition )
+          && ( minSrcExtent == rhs.minSrcExtent )
+          && ( maxSrcExtent == rhs.maxSrcExtent )
+          && ( minDstPosition == rhs.minDstPosition )
+          && ( maxDstPosition == rhs.maxDstPosition )
+          && ( minDstExtent == rhs.minDstExtent )
+          && ( maxDstExtent == rhs.maxDstExtent );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets ) const
+    bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const
     {
-      vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets );
+      return !operator==( rhs );
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets ) const
-    {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-      assert( buffers.size() == offsets.size() );
-#else
-      if ( buffers.size() != offsets.size() )
-      {
-        throw std::logic_error( "vk::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
-      }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-      vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data() );
+    DisplayPlaneAlphaFlagsKHR supportedAlpha;
+    Offset2D minSrcPosition;
+    Offset2D maxSrcPosition;
+    Extent2D minSrcExtent;
+    Extent2D maxSrcExtent;
+    Offset2D minDstPosition;
+    Offset2D maxDstPosition;
+    Extent2D minDstExtent;
+    Extent2D maxDstExtent;
+  };
+  static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
+
+  enum class CompositeAlphaFlagBitsKHR
+  {
+    eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+    ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+    ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+    eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
+  };
+
+  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
+
+  VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 )
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits )
+  {
+    return ~( CompositeAlphaFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
+    };
+  };
+
+  enum class SurfaceTransformFlagBitsKHR
+  {
+    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+    eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+    eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
+  };
+
+  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
+
+  VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 )
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits )
+  {
+    return ~( SurfaceTransformFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
+    };
+  };
+
+  struct DisplayPropertiesKHR
+  {
+    operator const VkDisplayPropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplayPropertiesKHR*>(this);
+    }
+
+    bool operator==( DisplayPropertiesKHR const& rhs ) const
+    {
+      return ( display == rhs.display )
+          && ( displayName == rhs.displayName )
+          && ( physicalDimensions == rhs.physicalDimensions )
+          && ( physicalResolution == rhs.physicalResolution )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( planeReorderPossible == rhs.planeReorderPossible )
+          && ( persistentContent == rhs.persistentContent );
+    }
+
+    bool operator!=( DisplayPropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DisplayKHR display;
+    const char* displayName;
+    Extent2D physicalDimensions;
+    Extent2D physicalResolution;
+    SurfaceTransformFlagsKHR supportedTransforms;
+    Bool32 planeReorderPossible;
+    Bool32 persistentContent;
+  };
+  static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+
+  struct DisplaySurfaceCreateInfoKHR
+  {
+    DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), DisplayModeKHR displayMode_ = DisplayModeKHR(), uint32_t planeIndex_ = 0, uint32_t planeStackIndex_ = 0, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = 0, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = Extent2D() )
+      : sType( StructureType::eDisplaySurfaceCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , displayMode( displayMode_ )
+      , planeIndex( planeIndex_ )
+      , planeStackIndex( planeStackIndex_ )
+      , transform( transform_ )
+      , globalAlpha( globalAlpha_ )
+      , alphaMode( alphaMode_ )
+      , imageExtent( imageExtent_ )
+    {
+    }
+
+    DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
+    }
+
+    DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ )
+    {
+      displayMode = displayMode_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ )
+    {
+      planeIndex = planeIndex_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ )
+    {
+      planeStackIndex = planeStackIndex_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ )
+    {
+      transform = transform_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ )
+    {
+      globalAlpha = globalAlpha_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ )
+    {
+      alphaMode = alphaMode_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    operator const VkDisplaySurfaceCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>(this);
+    }
+
+    bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( displayMode == rhs.displayMode )
+          && ( planeIndex == rhs.planeIndex )
+          && ( planeStackIndex == rhs.planeStackIndex )
+          && ( transform == rhs.transform )
+          && ( globalAlpha == rhs.globalAlpha )
+          && ( alphaMode == rhs.alphaMode )
+          && ( imageExtent == rhs.imageExtent );
+    }
+
+    bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DisplaySurfaceCreateFlagsKHR flags;
+    DisplayModeKHR displayMode;
+    uint32_t planeIndex;
+    uint32_t planeStackIndex;
+    SurfaceTransformFlagBitsKHR transform;
+    float globalAlpha;
+    DisplayPlaneAlphaFlagBitsKHR alphaMode;
+    Extent2D imageExtent;
+  };
+  static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
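
DisplaySurfaceCreateInfoKHR follows the same fluent-setter pattern as the other create-info wrappers. A minimal sketch, assuming a vk::DisplayModeKHR handle (displayMode) has already been obtained:

    vk::DisplaySurfaceCreateInfoKHR surfaceInfo = vk::DisplaySurfaceCreateInfoKHR()
        .setDisplayMode( displayMode )                                    // assumed handle
        .setPlaneIndex( 0 )
        .setTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
        .setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque )
        .setImageExtent( vk::Extent2D( 1920, 1080 ) );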
+
+  struct SurfaceCapabilitiesKHR
+  {
+    operator const VkSurfaceCapabilitiesKHR&() const
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>(this);
+    }
+
+    bool operator==( SurfaceCapabilitiesKHR const& rhs ) const
+    {
+      return ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags );
+    }
+
+    bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t minImageCount;
+    uint32_t maxImageCount;
+    Extent2D currentExtent;
+    Extent2D minImageExtent;
+    Extent2D maxImageExtent;
+    uint32_t maxImageArrayLayers;
+    SurfaceTransformFlagsKHR supportedTransforms;
+    SurfaceTransformFlagBitsKHR currentTransform;
+    CompositeAlphaFlagsKHR supportedCompositeAlpha;
+    ImageUsageFlags supportedUsageFlags;
+  };
+  static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
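
SurfaceCapabilitiesKHR is a query result; swapchain parameters are typically clamped against it. A minimal sketch, assuming caps was previously filled by a surface-capabilities query:

    uint32_t imageCount = caps.minImageCount + 1;
    if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )
    {
      imageCount = caps.maxImageCount;   // maxImageCount == 0 means "no upper limit"
    }
    vk::SurfaceTransformFlagBitsKHR preTransform = caps.currentTransform;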
+
+  struct SwapchainCreateInfoKHR
+  {
+    SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), SurfaceKHR surface_ = SurfaceKHR(), uint32_t minImageCount_ = 0, Format imageFormat_ = Format::eUndefined, ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, Extent2D imageExtent_ = Extent2D(), uint32_t imageArrayLayers_ = 0, ImageUsageFlags imageUsage_ = ImageUsageFlags(), SharingMode imageSharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, Bool32 clipped_ = 0, SwapchainKHR oldSwapchain_ = SwapchainKHR() )
+      : sType( StructureType::eSwapchainCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , surface( surface_ )
+      , minImageCount( minImageCount_ )
+      , imageFormat( imageFormat_ )
+      , imageColorSpace( imageColorSpace_ )
+      , imageExtent( imageExtent_ )
+      , imageArrayLayers( imageArrayLayers_ )
+      , imageUsage( imageUsage_ )
+      , imageSharingMode( imageSharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , preTransform( preTransform_ )
+      , compositeAlpha( compositeAlpha_ )
+      , presentMode( presentMode_ )
+      , clipped( clipped_ )
+      , oldSwapchain( oldSwapchain_ )
+    {
+    }
+
+    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
+    }
+
+    SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ )
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ )
+    {
+      minImageCount = minImageCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ )
+    {
+      imageFormat = imageFormat_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ )
+    {
+      imageColorSpace = imageColorSpace_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ )
+    {
+      imageArrayLayers = imageArrayLayers_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ )
+    {
+      imageUsage = imageUsage_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ )
+    {
+      imageSharingMode = imageSharingMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ )
+    {
+      preTransform = preTransform_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ )
+    {
+      compositeAlpha = compositeAlpha_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ )
+    {
+      presentMode = presentMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ )
+    {
+      clipped = clipped_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ )
+    {
+      oldSwapchain = oldSwapchain_;
+      return *this;
+    }
+
+    operator const VkSwapchainCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>(this);
+    }
+
+    bool operator==( SwapchainCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( surface == rhs.surface )
+          && ( minImageCount == rhs.minImageCount )
+          && ( imageFormat == rhs.imageFormat )
+          && ( imageColorSpace == rhs.imageColorSpace )
+          && ( imageExtent == rhs.imageExtent )
+          && ( imageArrayLayers == rhs.imageArrayLayers )
+          && ( imageUsage == rhs.imageUsage )
+          && ( imageSharingMode == rhs.imageSharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( preTransform == rhs.preTransform )
+          && ( compositeAlpha == rhs.compositeAlpha )
+          && ( presentMode == rhs.presentMode )
+          && ( clipped == rhs.clipped )
+          && ( oldSwapchain == rhs.oldSwapchain );
+    }
+
+    bool operator!=( SwapchainCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SwapchainCreateFlagsKHR flags;
+    SurfaceKHR surface;
+    uint32_t minImageCount;
+    Format imageFormat;
+    ColorSpaceKHR imageColorSpace;
+    Extent2D imageExtent;
+    uint32_t imageArrayLayers;
+    ImageUsageFlags imageUsage;
+    SharingMode imageSharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+    SurfaceTransformFlagBitsKHR preTransform;
+    CompositeAlphaFlagBitsKHR compositeAlpha;
+    PresentModeKHR presentMode;
+    Bool32 clipped;
+    SwapchainKHR oldSwapchain;
+  };
+  static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
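
The conversion operator to VkSwapchainCreateInfoKHR lets the wrapper feed the C entry point directly. A minimal sketch, assuming device, surface, imageCount and extent are valid values obtained elsewhere:

    vk::SwapchainCreateInfoKHR swapchainInfo = vk::SwapchainCreateInfoKHR()
        .setSurface( surface )
        .setMinImageCount( imageCount )
        .setImageFormat( vk::Format::eB8G8R8A8Unorm )
        .setImageColorSpace( vk::ColorSpaceKHR::eSrgbNonlinear )
        .setImageExtent( extent )
        .setImageArrayLayers( 1 )
        .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
        .setPresentMode( vk::PresentModeKHR::eFifo )
        .setClipped( VK_TRUE );
    VkSwapchainKHR swapchain;
    vkCreateSwapchainKHR( device,
                          &static_cast<const VkSwapchainCreateInfoKHR&>( swapchainInfo ),
                          nullptr, &swapchain );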
+
+  enum class DebugReportFlagBitsEXT
+  {
+    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+  };
+
+  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
+
+  VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 )
+  {
+    return DebugReportFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits )
+  {
+    return ~( DebugReportFlagsEXT( bits ) );
+  }
+
+  template <> struct FlagTraits<DebugReportFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
+    };
+  };
+
+  struct DebugReportCallbackCreateInfoEXT
+  {
+    DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, void* pUserData_ = nullptr )
+      : sType( StructureType::eDebugReportCallbackCreateInfoEXT )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , pfnCallback( pfnCallback_ )
+      , pUserData( pUserData_ )
+    {
+    }
+
+    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
+    }
+
+    DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ )
+    {
+      pfnCallback = pfnCallback_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ )
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    operator const VkDebugReportCallbackCreateInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>(this);
+    }
+
+    bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnCallback == rhs.pfnCallback )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DebugReportFlagsEXT flags;
+    PFN_vkDebugReportCallbackEXT pfnCallback;
+    void* pUserData;
+  };
+  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
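
A minimal sketch of wiring a callback through this wrapper; the callback body is an illustrative assumption, not part of this change (assumes <cstdio>):

    VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugReportFlagsEXT flags,
        VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location,
        int32_t messageCode, const char* pLayerPrefix, const char* pMessage, void* pUserData )
    {
      fprintf( stderr, "[%s] %s\n", pLayerPrefix, pMessage );
      return VK_FALSE;   // do not abort the call that triggered the report
    }

    vk::DebugReportCallbackCreateInfoEXT callbackInfo = vk::DebugReportCallbackCreateInfoEXT()
        .setFlags( vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning )
        .setPfnCallback( debugCallback );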
+
+  enum class DebugReportObjectTypeEXT
+  {
+    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
+    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
+    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+    eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
+    eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+    eDisplayKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
+    eDisplayModeKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
+    eObjectTableNvx = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,
+    eIndirectCommandsLayoutNvx = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT
+  };
+
+  struct DebugMarkerObjectNameInfoEXT
+  {
+    DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, const char* pObjectName_ = nullptr )
+      : sType( StructureType::eDebugMarkerObjectNameInfoEXT )
+      , pNext( nullptr )
+      , objectType( objectType_ )
+      , object( object_ )
+      , pObjectName( pObjectName_ )
+    {
+    }
+
+    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
+    }
+
+    DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ )
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+
+    operator const VkDebugMarkerObjectNameInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>(this);
+    }
+
+    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( pObjectName == rhs.pObjectName );
+    }
+
+    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DebugReportObjectTypeEXT objectType;
+    uint64_t object;
+    const char* pObjectName;
+  };
+  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
+
+  struct DebugMarkerObjectTagInfoEXT
+  {
+    DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr )
+      : sType( StructureType::eDebugMarkerObjectTagInfoEXT )
+      , pNext( nullptr )
+      , objectType( objectType_ )
+      , object( object_ )
+      , tagName( tagName_ )
+      , tagSize( tagSize_ )
+      , pTag( pTag_ )
+    {
+    }
+
+    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
+    }
+
+    DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ )
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ )
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ )
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ )
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+    operator const VkDebugMarkerObjectTagInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>(this);
+    }
+
+    bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+    }
+
+    bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DebugReportObjectTypeEXT objectType;
+    uint64_t object;
+    uint64_t tagName;
+    size_t tagSize;
+    const void* pTag;
+  };
+  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+
+  enum class DebugReportErrorEXT
+  {
+    eNone = VK_DEBUG_REPORT_ERROR_NONE_EXT,
+    eCallbackRef = VK_DEBUG_REPORT_ERROR_CALLBACK_REF_EXT
+  };
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
+      : sType( StructureType::ePipelineRasterizationStateRasterizationOrderAMD )
+      , pNext( nullptr )
+      , rasterizationOrder( rasterizationOrder_ )
+    {
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
+    {
+      rasterizationOrder = rasterizationOrder_;
+      return *this;
+    }
+
+    operator const VkPipelineRasterizationStateRasterizationOrderAMD&() const
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
+    }
+
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rasterizationOrder == rhs.rasterizationOrder );
+    }
+
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    RasterizationOrderAMD rasterizationOrder;
+  };
+  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
+
+  enum class ExternalMemoryHandleTypeFlagBitsNV
+  {
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+  };
+
+  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
+
+  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 )
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits )
+  {
+    return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
+  }
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
+    };
+  };
+
+  struct ExternalMemoryImageCreateInfoNV
+  {
+    ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
+      : sType( StructureType::eExternalMemoryImageCreateInfoNV )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
+    }
+
+    ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator const VkExternalMemoryImageCreateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>(this);
+    }
+
+    bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsNV handleTypes;
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
+
+  struct ExportMemoryAllocateInfoNV
+  {
+    ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
+      : sType( StructureType::eExportMemoryAllocateInfoNV )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
+    }
+
+    ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator const VkExportMemoryAllocateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>(this);
+    }
+
+    bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsNV handleTypes;
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
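
ExportMemoryAllocateInfoNV is consumed through the pNext chain of a memory allocation. A minimal sketch, assuming requiredSize and memoryTypeIndex were derived from the resource's memory requirements:

    vk::ExportMemoryAllocateInfoNV exportInfo = vk::ExportMemoryAllocateInfoNV()
        .setHandleTypes( vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 );
    vk::MemoryAllocateInfo allocInfo = vk::MemoryAllocateInfo()
        .setAllocationSize( requiredSize )
        .setMemoryTypeIndex( memoryTypeIndex )
        .setPNext( &exportInfo );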
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportMemoryWin32HandleInfoNV
+  {
+    ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), HANDLE handle_ = 0 )
+      : sType( StructureType::eImportMemoryWin32HandleInfoNV )
+      , pNext( nullptr )
+      , handleType( handleType_ )
+      , handle( handle_ )
+    {
+    }
+
+    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
+    }
+
+    ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ )
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    operator const VkImportMemoryWin32HandleInfoNV&() const
+    {
+      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>(this);
+    }
+
+    bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle );
+    }
+
+    bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsNV handleType;
+    HANDLE handle;
+  };
+  static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  enum class ExternalMemoryFeatureFlagBitsNV
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+  };
+
+  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
+
+  VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 )
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits )
+  {
+    return ~( ExternalMemoryFeatureFlagsNV( bits ) );
+  }
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
+  {
+    enum
     {
-      vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+      allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
+    };
+  };
+
+  struct ExternalImageFormatPropertiesNV
+  {
+    operator const VkExternalImageFormatPropertiesNV&() const
+    {
+      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>(this);
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
+    bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const
     {
-      vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+      return ( imageFormatProperties == rhs.imageFormatProperties )
+          && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
+    bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const
     {
-      vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+      return !operator==( rhs );
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
+    ImageFormatProperties imageFormatProperties;
+    ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
+    ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
+    ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
+  };
+  static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+
+  enum class ValidationCheckEXT
+  {
+    eAll = VK_VALIDATION_CHECK_ALL_EXT
+  };
+
+  struct ValidationFlagsEXT
+  {
+    ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, ValidationCheckEXT* pDisabledValidationChecks_ = nullptr )
+      : sType( StructureType::eValidationFlagsEXT )
+      , pNext( nullptr )
+      , disabledValidationCheckCount( disabledValidationCheckCount_ )
+      , pDisabledValidationChecks( pDisabledValidationChecks_ )
     {
-      vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs )
     {
-      vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+      memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+    ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs )
     {
-      vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+      memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+    ValidationFlagsEXT& setSType( StructureType sType_ )
     {
-      vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+      sType = sType_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+    ValidationFlagsEXT& setPNext( const void* pNext_ )
     {
-      vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+      pNext = pNext_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
+    ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ )
     {
-      vkCmdDispatch( m_commandBuffer, x, y, z );
+      disabledValidationCheckCount = disabledValidationCheckCount_;
+      return *this;
+    }
+
+    ValidationFlagsEXT& setPDisabledValidationChecks( ValidationCheckEXT* pDisabledValidationChecks_ )
+    {
+      pDisabledValidationChecks = pDisabledValidationChecks_;
+      return *this;
+    }
+
+    operator const VkValidationFlagsEXT&() const
+    {
+      return *reinterpret_cast<const VkValidationFlagsEXT*>(this);
+    }
+
+    bool operator==( ValidationFlagsEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
+          && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+    }
+
+    bool operator!=( ValidationFlagsEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t disabledValidationCheckCount;
+    ValidationCheckEXT* pDisabledValidationChecks;
+  };
+  static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
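
ValidationFlagsEXT (VK_EXT_validation_flags) is chained into instance creation to disable selected checks. A minimal sketch; the surrounding InstanceCreateInfo setup is an illustrative assumption:

    vk::ValidationCheckEXT disabledChecks[] = { vk::ValidationCheckEXT::eAll };
    vk::ValidationFlagsEXT validationFlags = vk::ValidationFlagsEXT()
        .setDisabledValidationCheckCount( 1 )
        .setPDisabledValidationChecks( disabledChecks );
    vk::InstanceCreateInfo instanceInfo = vk::InstanceCreateInfo()
        .setPNext( &validationFlags );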
+
+  enum class IndirectCommandsLayoutUsageFlagBitsNVX
+  {
+    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
+    eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
+    eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
+    eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
+  };
+
+  using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
+
+  VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 )
+  {
+    return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits )
+  {
+    return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
+  }
+
+  template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
+  {
+    enum
+    {
+      allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences)
+    };
+  };
+
+  enum class ObjectEntryUsageFlagBitsNVX
+  {
+    eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
+    eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
+  };
+
+  using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
+
+  VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 )
+  {
+    return ObjectEntryUsageFlagsNVX( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits )
+  {
+    return ~( ObjectEntryUsageFlagsNVX( bits ) );
+  }
+
+  template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
+  {
+    enum
+    {
+      allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute)
+    };
+  };
+
+  enum class IndirectCommandsTokenTypeNVX
+  {
+    eVkIndirectCommandsTokenPipeline = VK_INDIRECT_COMMANDS_TOKEN_PIPELINE_NVX,
+    eVkIndirectCommandsTokenDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_DESCRIPTOR_SET_NVX,
+    eVkIndirectCommandsTokenIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_INDEX_BUFFER_NVX,
+    eVkIndirectCommandsTokenVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_VERTEX_BUFFER_NVX,
+    eVkIndirectCommandsTokenPushConstant = VK_INDIRECT_COMMANDS_TOKEN_PUSH_CONSTANT_NVX,
+    eVkIndirectCommandsTokenDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_DRAW_INDEXED_NVX,
+    eVkIndirectCommandsTokenDraw = VK_INDIRECT_COMMANDS_TOKEN_DRAW_NVX,
+    eVkIndirectCommandsTokenDispatch = VK_INDIRECT_COMMANDS_TOKEN_DISPATCH_NVX
+  };
+
+  struct IndirectCommandsTokenNVX
+  {
+    IndirectCommandsTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0 )
+      : tokenType( tokenType_ )
+      , buffer( buffer_ )
+      , offset( offset_ )
+    {
+    }
+
+    IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(IndirectCommandsTokenNVX) );
+    }
+
+    IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(IndirectCommandsTokenNVX) );
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
+    {
+      tokenType = tokenType_;
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX& setOffset( DeviceSize offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    operator const VkIndirectCommandsTokenNVX&() const
+    {
+      return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>(this);
+    }
+
+    bool operator==( IndirectCommandsTokenNVX const& rhs ) const
+    {
+      return ( tokenType == rhs.tokenType )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( IndirectCommandsTokenNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    IndirectCommandsTokenTypeNVX tokenType;
+    Buffer buffer;
+    DeviceSize offset;
+  };
+  static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
+
+  struct IndirectCommandsLayoutTokenNVX
+  {
+    IndirectCommandsLayoutTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, uint32_t bindingUnit_ = 0, uint32_t dynamicCount_ = 0, uint32_t divisor_ = 0 )
+      : tokenType( tokenType_ )
+      , bindingUnit( bindingUnit_ )
+      , dynamicCount( dynamicCount_ )
+      , divisor( divisor_ )
+    {
+    }
+
+    IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(IndirectCommandsLayoutTokenNVX) );
+    }
+
+    IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(IndirectCommandsLayoutTokenNVX) );
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
+    {
+      tokenType = tokenType_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX& setBindingUnit( uint32_t bindingUnit_ )
+    {
+      bindingUnit = bindingUnit_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX& setDynamicCount( uint32_t dynamicCount_ )
+    {
+      dynamicCount = dynamicCount_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX& setDivisor( uint32_t divisor_ )
+    {
+      divisor = divisor_;
+      return *this;
+    }
+
+    operator const VkIndirectCommandsLayoutTokenNVX&() const
+    {
+      return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNVX*>(this);
+    }
+
+    bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const
+    {
+      return ( tokenType == rhs.tokenType )
+          && ( bindingUnit == rhs.bindingUnit )
+          && ( dynamicCount == rhs.dynamicCount )
+          && ( divisor == rhs.divisor );
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
+    bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const
     {
-      vkCmdDispatch( m_commandBuffer, x, y, z );
+      return !operator==( rhs );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
+    IndirectCommandsTokenTypeNVX tokenType;
+    uint32_t bindingUnit;
+    uint32_t dynamicCount;
+    uint32_t divisor;
+  };
+  static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
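
A minimal sketch of describing a generated-command stream with these layout tokens; the particular token mix is an arbitrary assumption:

    vk::IndirectCommandsLayoutTokenNVX layoutTokens[] = {
      vk::IndirectCommandsLayoutTokenNVX( vk::IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline ),
      vk::IndirectCommandsLayoutTokenNVX( vk::IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDrawIndexed )
    };
    // layoutTokens would then be referenced via tokenCount / pTokens in the
    // IndirectCommandsLayoutCreateInfoNVX defined below.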
+
+  struct IndirectCommandsLayoutCreateInfoNVX
+  {
+    IndirectCommandsLayoutCreateInfoNVX( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, IndirectCommandsLayoutUsageFlagsNVX flags_ = IndirectCommandsLayoutUsageFlagsNVX(), uint32_t tokenCount_ = 0, const IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr )
+      : sType( StructureType::eIndirectCommandsLayoutCreateInfoNVX )
+      , pNext( nullptr )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , flags( flags_ )
+      , tokenCount( tokenCount_ )
+      , pTokens( pTokens_ )
     {
-      vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
+    IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
     {
-      vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
+      memcpy( this, &rhs, sizeof(IndirectCommandsLayoutCreateInfoNVX) );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions ) const
+    IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
     {
-      vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
+      memcpy( this, &rhs, sizeof(IndirectCommandsLayoutCreateInfoNVX) );
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions ) const
+    IndirectCommandsLayoutCreateInfoNVX& setSType( StructureType sType_ )
     {
-      vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
+      sType = sType_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions ) const
+    IndirectCommandsLayoutCreateInfoNVX& setPNext( const void* pNext_ )
     {
-      vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
+      pNext = pNext_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions ) const
+    IndirectCommandsLayoutCreateInfoNVX& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
     {
-      vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter ) const
+    IndirectCommandsLayoutCreateInfoNVX& setFlags( IndirectCommandsLayoutUsageFlagsNVX flags_ )
     {
-      vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
+      flags = flags_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter ) const
+    IndirectCommandsLayoutCreateInfoNVX& setTokenCount( uint32_t tokenCount_ )
     {
-      vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
+      tokenCount = tokenCount_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions ) const
+    IndirectCommandsLayoutCreateInfoNVX& setPTokens( const IndirectCommandsLayoutTokenNVX* pTokens_ )
     {
-      vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+      pTokens = pTokens_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions ) const
+    operator const VkIndirectCommandsLayoutCreateInfoNVX&() const
     {
-      vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+      return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>(this);
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions ) const
+    bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
     {
-      vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( flags == rhs.flags )
+          && ( tokenCount == rhs.tokenCount )
+          && ( pTokens == rhs.pTokens );
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions ) const
+    bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
     {
-      vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+      return !operator==( rhs );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData ) const
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineBindPoint pipelineBindPoint;
+    IndirectCommandsLayoutUsageFlagsNVX flags;
+    uint32_t tokenCount;
+    const IndirectCommandsLayoutTokenNVX* pTokens;
+  };
+  static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
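// Illustrative sketch (not part of this patch; assumes #include <vulkan/vulkan.hpp>):
// the new NVX wrappers use the same fluent setters as the rest of vulkan.hpp, so an
// indirect-commands layout for the graphics bind point can be described as below. The
// token type is left at its default value here, since IndirectCommandsTokenTypeNVX's
// enumerators are declared elsewhere in this header.
//
//   vk::IndirectCommandsLayoutTokenNVX token;
//   token.setBindingUnit( 0 )
//        .setDynamicCount( 1 )
//        .setDivisor( 1 );
//
//   vk::IndirectCommandsLayoutCreateInfoNVX layoutInfo;
//   layoutInfo.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
//             .setTokenCount( 1 )
//             .setPTokens( &token );   // token must stay alive while layoutInfo is in use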
+
+  enum class ObjectEntryTypeNVX
+  {
+    eVkObjectEntryDescriptorSet = VK_OBJECT_ENTRY_DESCRIPTOR_SET_NVX,
+    eVkObjectEntryPipeline = VK_OBJECT_ENTRY_PIPELINE_NVX,
+    eVkObjectEntryIndexBuffer = VK_OBJECT_ENTRY_INDEX_BUFFER_NVX,
+    eVkObjectEntryVertexBuffer = VK_OBJECT_ENTRY_VERTEX_BUFFER_NVX,
+    eVkObjectEntryPushConstant = VK_OBJECT_ENTRY_PUSH_CONSTANT_NVX
+  };
+
+  struct ObjectTableCreateInfoNVX
+  {
+    ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, const ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, const uint32_t* pObjectEntryCounts_ = nullptr, const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, uint32_t maxUniformBuffersPerDescriptor_ = 0, uint32_t maxStorageBuffersPerDescriptor_ = 0, uint32_t maxStorageImagesPerDescriptor_ = 0, uint32_t maxSampledImagesPerDescriptor_ = 0, uint32_t maxPipelineLayouts_ = 0 )
+      : sType( StructureType::eObjectTableCreateInfoNVX )
+      , pNext( nullptr )
+      , objectCount( objectCount_ )
+      , pObjectEntryTypes( pObjectEntryTypes_ )
+      , pObjectEntryCounts( pObjectEntryCounts_ )
+      , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
+      , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
+      , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
+      , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
+      , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
+      , maxPipelineLayouts( maxPipelineLayouts_ )
     {
-      vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, dataSize, pData );
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T>
-    void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data ) const
+    ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs )
     {
-      vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
+      memcpy( this, &rhs, sizeof(ObjectTableCreateInfoNVX) );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
+    ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs )
     {
-      vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
+      memcpy( this, &rhs, sizeof(ObjectTableCreateInfoNVX) );
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
+    ObjectTableCreateInfoNVX& setSType( StructureType sType_ )
     {
-      vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
+      sType = sType_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
+    ObjectTableCreateInfoNVX& setPNext( const void* pNext_ )
     {
-      vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+      pNext = pNext_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges ) const
+    ObjectTableCreateInfoNVX& setObjectCount( uint32_t objectCount_ )
     {
-      vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+      objectCount = objectCount_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
+    ObjectTableCreateInfoNVX& setPObjectEntryTypes( const ObjectEntryTypeNVX* pObjectEntryTypes_ )
     {
-      vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+      pObjectEntryTypes = pObjectEntryTypes_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges ) const
+    ObjectTableCreateInfoNVX& setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ )
     {
-      vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+      pObjectEntryCounts = pObjectEntryCounts_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects ) const
+    ObjectTableCreateInfoNVX& setPObjectEntryUsageFlags( const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ )
     {
-      vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
+      pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects ) const
+    ObjectTableCreateInfoNVX& setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ )
     {
-      vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
+      maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions ) const
+    ObjectTableCreateInfoNVX& setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ )
     {
-      vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
+      maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions ) const
+    ObjectTableCreateInfoNVX& setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ )
     {
-      vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
+      maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setEvent( Event event, PipelineStageFlags stageMask ) const
+    ObjectTableCreateInfoNVX& setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ )
     {
-      vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+      maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void setEvent( Event event, PipelineStageFlags stageMask ) const
+    ObjectTableCreateInfoNVX& setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ )
     {
-      vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+      maxPipelineLayouts = maxPipelineLayouts_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void resetEvent( Event event, PipelineStageFlags stageMask ) const
+    operator const VkObjectTableCreateInfoNVX&() const
     {
-      vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+      return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>(this);
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void resetEvent( Event event, PipelineStageFlags stageMask ) const
+    bool operator==( ObjectTableCreateInfoNVX const& rhs ) const
     {
-      vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectCount == rhs.objectCount )
+          && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
+          && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
+          && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
+          && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
+          && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
+          && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
+          && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
+          && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
+    bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const
     {
-      vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+      return !operator==( rhs );
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t objectCount;
+    const ObjectEntryTypeNVX* pObjectEntryTypes;
+    const uint32_t* pObjectEntryCounts;
+    const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
+    uint32_t maxUniformBuffersPerDescriptor;
+    uint32_t maxStorageBuffersPerDescriptor;
+    uint32_t maxStorageImagesPerDescriptor;
+    uint32_t maxSampledImagesPerDescriptor;
+    uint32_t maxPipelineLayouts;
+  };
+  static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTableEntryNVX
+  {
+    ObjectTableEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX() )
+      : type( type_ )
+      , flags( flags_ )
     {
-      vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
+    ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs )
     {
-      vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+      memcpy( this, &rhs, sizeof(ObjectTableEntryNVX) );
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
+    ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs )
     {
-      vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
+      memcpy( this, &rhs, sizeof(ObjectTableEntryNVX) );
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
+    ObjectTableEntryNVX& setType( ObjectEntryTypeNVX type_ )
     {
-      vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+      type = type_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
+    ObjectTableEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
     {
-      vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+      flags = flags_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void endQuery( QueryPool queryPool, uint32_t query ) const
+    operator const VkObjectTableEntryNVX&() const
     {
-      vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+      return *reinterpret_cast<const VkObjectTableEntryNVX*>(this);
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void endQuery( QueryPool queryPool, uint32_t query ) const
+    bool operator==( ObjectTableEntryNVX const& rhs ) const
     {
-      vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+      return ( type == rhs.type )
+          && ( flags == rhs.flags );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
+    bool operator!=( ObjectTableEntryNVX const& rhs ) const
     {
-      vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+      return !operator==( rhs );
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+  };
+  static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTablePipelineEntryNVX
+  {
+    ObjectTablePipelineEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Pipeline pipeline_ = Pipeline() )
+      : type( type_ )
+      , flags( flags_ )
+      , pipeline( pipeline_ )
     {
-      vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
+    ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs )
     {
-      vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+      memcpy( this, &rhs, sizeof(ObjectTablePipelineEntryNVX) );
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
+    ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs )
     {
-      vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+      memcpy( this, &rhs, sizeof(ObjectTablePipelineEntryNVX) );
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
+    ObjectTablePipelineEntryNVX& setType( ObjectEntryTypeNVX type_ )
     {
-      vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
+      type = type_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
+    ObjectTablePipelineEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
     {
-      vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
+      flags = flags_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const
+    ObjectTablePipelineEntryNVX& setPipeline( Pipeline pipeline_ )
     {
-      vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
+      pipeline = pipeline_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T>
-    void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values ) const
+    operator const VkObjectTablePipelineEntryNVX&() const
     {
-      vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
+      return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>(this);
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents ) const
+    bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const
     {
-      vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( pipeline == rhs.pipeline );
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents ) const
+    bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const
     {
-      vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+      return !operator==( rhs );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void nextSubpass( SubpassContents contents ) const
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+    Pipeline pipeline;
+  };
+  static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTableDescriptorSetEntryNVX
+  {
+    ObjectTableDescriptorSetEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), DescriptorSet descriptorSet_ = DescriptorSet() )
+      : type( type_ )
+      , flags( flags_ )
+      , pipelineLayout( pipelineLayout_ )
+      , descriptorSet( descriptorSet_ )
     {
-      vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void nextSubpass( SubpassContents contents ) const
+    ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs )
     {
-      vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+      memcpy( this, &rhs, sizeof(ObjectTableDescriptorSetEntryNVX) );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void endRenderPass(  ) const
+    ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs )
     {
-      vkCmdEndRenderPass( m_commandBuffer );
+      memcpy( this, &rhs, sizeof(ObjectTableDescriptorSetEntryNVX) );
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void endRenderPass() const
+    ObjectTableDescriptorSetEntryNVX& setType( ObjectEntryTypeNVX type_ )
     {
-      vkCmdEndRenderPass( m_commandBuffer );
+      type = type_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
+    ObjectTableDescriptorSetEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
     {
-      vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+      flags = flags_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void executeCommands( ArrayProxy<const CommandBuffer> commandBuffers ) const
+    ObjectTableDescriptorSetEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
     {
-      vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+      pipelineLayout = pipelineLayout_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void debugMarkerBeginEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
+    ObjectTableDescriptorSetEntryNVX& setDescriptorSet( DescriptorSet descriptorSet_ )
     {
-      vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
+      descriptorSet = descriptorSet_;
+      return *this;
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    DebugMarkerMarkerInfoEXT debugMarkerBeginEXT() const
+    operator const VkObjectTableDescriptorSetEntryNVX&() const
     {
-      DebugMarkerMarkerInfoEXT markerInfo;
-      vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
-      return markerInfo;
+      return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>(this);
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void debugMarkerEndEXT(  ) const
+    bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const
     {
-      vkCmdDebugMarkerEndEXT( m_commandBuffer );
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( descriptorSet == rhs.descriptorSet );
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void debugMarkerEndEXT() const
+    bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const
     {
-      vkCmdDebugMarkerEndEXT( m_commandBuffer );
+      return !operator==( rhs );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    void debugMarkerInsertEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+    PipelineLayout pipelineLayout;
+    DescriptorSet descriptorSet;
+  };
+  static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTableVertexBufferEntryNVX
+  {
+    ObjectTableVertexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() )
+      : type( type_ )
+      , flags( flags_ )
+      , buffer( buffer_ )
     {
-      vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
     }
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    DebugMarkerMarkerInfoEXT debugMarkerInsertEXT() const
+    ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs )
     {
-      DebugMarkerMarkerInfoEXT markerInfo;
-      vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
-      return markerInfo;
+      memcpy( this, &rhs, sizeof(ObjectTableVertexBufferEntryNVX) );
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+    ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs )
     {
-      vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+      memcpy( this, &rhs, sizeof(ObjectTableVertexBufferEntryNVX) );
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+    ObjectTableVertexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
     {
-      vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+      type = type_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+    ObjectTableVertexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
     {
-      vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+      flags = flags_;
+      return *this;
     }
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+    ObjectTableVertexBufferEntryNVX& setBuffer( Buffer buffer_ )
     {
-      vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+      buffer = buffer_;
+      return *this;
     }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    explicit
-#endif
-    operator VkCommandBuffer() const
+    operator const VkObjectTableVertexBufferEntryNVX&() const
     {
-      return m_commandBuffer;
+      return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>(this);
     }
 
-    explicit operator bool() const
+    bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const
     {
-      return m_commandBuffer != VK_NULL_HANDLE;
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer );
     }
 
-    bool operator!() const
+    bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const
     {
-      return m_commandBuffer == VK_NULL_HANDLE;
+      return !operator==( rhs );
     }
 
-  private:
-    VkCommandBuffer m_commandBuffer;
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+    Buffer buffer;
   };
-  static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+  static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
 
-  struct SubpassDependency
+  struct ObjectTableIndexBufferEntryNVX
   {
-    SubpassDependency( uint32_t srcSubpass_ = 0, uint32_t dstSubpass_ = 0, PipelineStageFlags srcStageMask_ = PipelineStageFlags(), PipelineStageFlags dstStageMask_ = PipelineStageFlags(), AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), DependencyFlags dependencyFlags_ = DependencyFlags() )
-      : srcSubpass( srcSubpass_ )
-      , dstSubpass( dstSubpass_ )
-      , srcStageMask( srcStageMask_ )
-      , dstStageMask( dstStageMask_ )
-      , srcAccessMask( srcAccessMask_ )
-      , dstAccessMask( dstAccessMask_ )
-      , dependencyFlags( dependencyFlags_ )
+    ObjectTableIndexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() )
+      : type( type_ )
+      , flags( flags_ )
+      , buffer( buffer_ )
     {
     }
 
-    SubpassDependency( VkSubpassDependency const & rhs )
+    ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs )
     {
-      memcpy( this, &rhs, sizeof(SubpassDependency) );
+      memcpy( this, &rhs, sizeof(ObjectTableIndexBufferEntryNVX) );
     }
 
-    SubpassDependency& operator=( VkSubpassDependency const & rhs )
+    ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs )
     {
-      memcpy( this, &rhs, sizeof(SubpassDependency) );
+      memcpy( this, &rhs, sizeof(ObjectTableIndexBufferEntryNVX) );
       return *this;
     }
 
-    SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ )
+    ObjectTableIndexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
     {
-      srcSubpass = srcSubpass_;
+      type = type_;
       return *this;
     }
 
-    SubpassDependency& setDstSubpass( uint32_t dstSubpass_ )
+    ObjectTableIndexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
     {
-      dstSubpass = dstSubpass_;
+      flags = flags_;
       return *this;
     }
 
-    SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ )
+    ObjectTableIndexBufferEntryNVX& setBuffer( Buffer buffer_ )
     {
-      srcStageMask = srcStageMask_;
+      buffer = buffer_;
       return *this;
     }
 
-    SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ )
+    operator const VkObjectTableIndexBufferEntryNVX&() const
     {
-      dstStageMask = dstStageMask_;
-      return *this;
+      return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>(this);
     }
 
-    SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ )
+    bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const
     {
-      srcAccessMask = srcAccessMask_;
-      return *this;
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer );
     }
 
-    SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ )
+    bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const
     {
-      dstAccessMask = dstAccessMask_;
-      return *this;
+      return !operator==( rhs );
     }
 
-    SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ )
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+    Buffer buffer;
+  };
+  static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTablePushConstantEntryNVX
+  {
+    ObjectTablePushConstantEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), ShaderStageFlags stageFlags_ = ShaderStageFlags() )
+      : type( type_ )
+      , flags( flags_ )
+      , pipelineLayout( pipelineLayout_ )
+      , stageFlags( stageFlags_ )
     {
-      dependencyFlags = dependencyFlags_;
-      return *this;
     }
 
-    operator const VkSubpassDependency&() const
+    ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs )
     {
-      return *reinterpret_cast<const VkSubpassDependency*>(this);
+      memcpy( this, &rhs, sizeof(ObjectTablePushConstantEntryNVX) );
     }
 
-    bool operator==( SubpassDependency const& rhs ) const
+    ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs )
     {
-      return ( srcSubpass == rhs.srcSubpass )
-          && ( dstSubpass == rhs.dstSubpass )
-          && ( srcStageMask == rhs.srcStageMask )
-          && ( dstStageMask == rhs.dstStageMask )
-          && ( srcAccessMask == rhs.srcAccessMask )
-          && ( dstAccessMask == rhs.dstAccessMask )
-          && ( dependencyFlags == rhs.dependencyFlags );
+      memcpy( this, &rhs, sizeof(ObjectTablePushConstantEntryNVX) );
+      return *this;
     }
 
-    bool operator!=( SubpassDependency const& rhs ) const
+    ObjectTablePushConstantEntryNVX& setType( ObjectEntryTypeNVX type_ )
     {
-      return !operator==( rhs );
+      type = type_;
+      return *this;
     }
 
-    uint32_t srcSubpass;
-    uint32_t dstSubpass;
-    PipelineStageFlags srcStageMask;
-    PipelineStageFlags dstStageMask;
-    AccessFlags srcAccessMask;
-    AccessFlags dstAccessMask;
-    DependencyFlags dependencyFlags;
-  };
-  static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
-
-  struct RenderPassCreateInfo
-  {
-    RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), uint32_t attachmentCount_ = 0, const AttachmentDescription* pAttachments_ = nullptr, uint32_t subpassCount_ = 0, const SubpassDescription* pSubpasses_ = nullptr, uint32_t dependencyCount_ = 0, const SubpassDependency* pDependencies_ = nullptr )
-      : sType( StructureType::eRenderPassCreateInfo )
-      , pNext( nullptr )
-      , flags( flags_ )
-      , attachmentCount( attachmentCount_ )
-      , pAttachments( pAttachments_ )
-      , subpassCount( subpassCount_ )
-      , pSubpasses( pSubpasses_ )
-      , dependencyCount( dependencyCount_ )
-      , pDependencies( pDependencies_ )
+    ObjectTablePushConstantEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
     {
+      flags = flags_;
+      return *this;
     }
 
-    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs )
+    ObjectTablePushConstantEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
     {
-      memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
+      pipelineLayout = pipelineLayout_;
+      return *this;
     }
 
-    RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs )
+    ObjectTablePushConstantEntryNVX& setStageFlags( ShaderStageFlags stageFlags_ )
     {
-      memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
+      stageFlags = stageFlags_;
       return *this;
     }
 
-    RenderPassCreateInfo& setSType( StructureType sType_ )
+    operator const VkObjectTablePushConstantEntryNVX&() const
     {
-      sType = sType_;
-      return *this;
+      return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>(this);
     }
 
-    RenderPassCreateInfo& setPNext( const void* pNext_ )
+    bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const
     {
-      pNext = pNext_;
-      return *this;
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( stageFlags == rhs.stageFlags );
     }
 
-    RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
+    bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+    PipelineLayout pipelineLayout;
+    ShaderStageFlags stageFlags;
+  };
+  static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
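// Illustrative sketch (not part of this patch; assumes #include <vulkan/vulkan.hpp>):
// an object table sized for one pipeline and one descriptor set, built with the new
// fluent setters. Real code would also set per-entry usage-flag bits; they are left
// default-constructed here because the ObjectEntryUsageFlagBitsNVX enumerators are
// declared elsewhere in this header.
//
//   vk::ObjectEntryTypeNVX       entryTypes[]  = { vk::ObjectEntryTypeNVX::eVkObjectEntryPipeline,
//                                                  vk::ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet };
//   uint32_t                     entryCounts[] = { 1, 1 };
//   vk::ObjectEntryUsageFlagsNVX entryUsage[]  = { vk::ObjectEntryUsageFlagsNVX(),
//                                                  vk::ObjectEntryUsageFlagsNVX() };
//
//   vk::ObjectTableCreateInfoNVX tableInfo;
//   tableInfo.setObjectCount( 2 )
//            .setPObjectEntryTypes( entryTypes )
//            .setPObjectEntryCounts( entryCounts )
//            .setPObjectEntryUsageFlags( entryUsage )
//            .setMaxPipelineLayouts( 1 );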
+
+  VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties )
+  {
+    return static_cast<Result>( vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator = std::allocator<LayerProperties>>
+  typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties()
+  {
+    std::vector<LayerProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( propertyCount <= properties.size() ); 
+    properties.resize( propertyCount ); 
+    return createResultValue( result, properties, "vk::enumerateInstanceLayerProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties )
+  {
+    return static_cast<Result>( vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator = std::allocator<ExtensionProperties>>
+  typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr )
+  {
+    std::vector<ExtensionProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( propertyCount <= properties.size() ); 
+    properties.resize( propertyCount ); 
+    return createResultValue( result, properties, "vk::enumerateInstanceExtensionProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
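// Illustrative sketch (not part of this patch; assumes #include <vulkan/vulkan.hpp>,
// <algorithm>, and <cstring>): in the default, exception-enabled configuration the
// enhanced-mode overload above returns the property vector directly, so probing for
// the newly added VK_NVX_device_generated_commands extension is a simple search.
//
//   std::vector<vk::ExtensionProperties> extensions = vk::enumerateInstanceExtensionProperties();
//   bool hasGeneratedCommands =
//     std::any_of( extensions.begin(), extensions.end(),
//                  []( vk::ExtensionProperties const & ep )
//                  { return std::strcmp( ep.extensionName, "VK_NVX_device_generated_commands" ) == 0; } );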
+
+  // forward declarations
+  struct CmdProcessCommandsInfoNVX;
 
-    RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
-    {
-      attachmentCount = attachmentCount_;
-      return *this;
-    }
+  class CommandBuffer
+  {
+  public:
+    CommandBuffer()
+      : m_commandBuffer(VK_NULL_HANDLE)
+    {}
 
-    RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ )
-    {
-      pAttachments = pAttachments_;
-      return *this;
-    }
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    CommandBuffer(VkCommandBuffer commandBuffer)
+       : m_commandBuffer(commandBuffer)
+    {}
 
-    RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ )
+    CommandBuffer& operator=(VkCommandBuffer commandBuffer)
     {
-      subpassCount = subpassCount_;
+      m_commandBuffer = commandBuffer;
       return *this;
     }
+#endif
 
-    RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ )
+    bool operator==(CommandBuffer const &rhs) const
     {
-      pSubpasses = pSubpasses_;
-      return *this;
+      return m_commandBuffer == rhs.m_commandBuffer;
     }
 
-    RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
+    bool operator!=(CommandBuffer const &rhs) const
     {
-      dependencyCount = dependencyCount_;
-      return *this;
+      return m_commandBuffer != rhs.m_commandBuffer;
     }
 
-    RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ )
+    bool operator<(CommandBuffer const &rhs) const
     {
-      pDependencies = pDependencies_;
-      return *this;
+      return m_commandBuffer < rhs.m_commandBuffer;
     }
 
-    operator const VkRenderPassCreateInfo&() const
+    Result begin( const CommandBufferBeginInfo* pBeginInfo ) const
     {
-      return *reinterpret_cast<const VkRenderPassCreateInfo*>(this);
+      return static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
     }
 
-    bool operator==( RenderPassCreateInfo const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo ) const
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( attachmentCount == rhs.attachmentCount )
-          && ( pAttachments == rhs.pAttachments )
-          && ( subpassCount == rhs.subpassCount )
-          && ( pSubpasses == rhs.pSubpasses )
-          && ( dependencyCount == rhs.dependencyCount )
-          && ( pDependencies == rhs.pDependencies );
+      Result result = static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
+      return createResultValue( result, "vk::CommandBuffer::begin" );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=( RenderPassCreateInfo const& rhs ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result end(  ) const
     {
-      return !operator==( rhs );
+      return static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  private:
-    StructureType sType;
-
-  public:
-    const void* pNext;
-    RenderPassCreateFlags flags;
-    uint32_t attachmentCount;
-    const AttachmentDescription* pAttachments;
-    uint32_t subpassCount;
-    const SubpassDescription* pSubpasses;
-    uint32_t dependencyCount;
-    const SubpassDependency* pDependencies;
-  };
-  static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
-
-  struct SubmitInfo
-  {
-    SubmitInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, const PipelineStageFlags* pWaitDstStageMask_ = nullptr, uint32_t commandBufferCount_ = 0, const CommandBuffer* pCommandBuffers_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
-      : sType( StructureType::eSubmitInfo )
-      , pNext( nullptr )
-      , waitSemaphoreCount( waitSemaphoreCount_ )
-      , pWaitSemaphores( pWaitSemaphores_ )
-      , pWaitDstStageMask( pWaitDstStageMask_ )
-      , commandBufferCount( commandBufferCount_ )
-      , pCommandBuffers( pCommandBuffers_ )
-      , signalSemaphoreCount( signalSemaphoreCount_ )
-      , pSignalSemaphores( pSignalSemaphores_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type end() const
     {
+      Result result = static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
+      return createResultValue( result, "vk::CommandBuffer::end" );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SubmitInfo( VkSubmitInfo const & rhs )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result reset( CommandBufferResetFlags flags ) const
     {
-      memcpy( this, &rhs, sizeof(SubmitInfo) );
+      return static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SubmitInfo& operator=( VkSubmitInfo const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type reset( CommandBufferResetFlags flags ) const
     {
-      memcpy( this, &rhs, sizeof(SubmitInfo) );
-      return *this;
+      Result result = static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+      return createResultValue( result, "vk::CommandBuffer::reset" );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SubmitInfo& setSType( StructureType sType_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
     {
-      sType = sType_;
-      return *this;
+      vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SubmitInfo& setPNext( const void* pNext_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
     {
-      pNext = pNext_;
-      return *this;
+      vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+    void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports ) const
     {
-      waitSemaphoreCount = waitSemaphoreCount_;
-      return *this;
+      vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
     }
 
-    SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports ) const
     {
-      pWaitSemaphores = pWaitSemaphores_;
-      return *this;
+      vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
+    void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors ) const
     {
-      pWaitDstStageMask = pWaitDstStageMask_;
-      return *this;
+      vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
     }
 
-    SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors ) const
     {
-      commandBufferCount = commandBufferCount_;
-      return *this;
+      vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setLineWidth( float lineWidth ) const
     {
-      pCommandBuffers = pCommandBuffers_;
-      return *this;
+      vkCmdSetLineWidth( m_commandBuffer, lineWidth );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setLineWidth( float lineWidth ) const
     {
-      signalSemaphoreCount = signalSemaphoreCount_;
-      return *this;
+      vkCmdSetLineWidth( m_commandBuffer, lineWidth );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
     {
-      pSignalSemaphores = pSignalSemaphores_;
-      return *this;
+      vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    operator const VkSubmitInfo&() const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
     {
-      return *reinterpret_cast<const VkSubmitInfo*>(this);
+      vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator==( SubmitInfo const& rhs ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setBlendConstants( const float blendConstants[4] ) const
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
-          && ( pWaitSemaphores == rhs.pWaitSemaphores )
-          && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
-          && ( commandBufferCount == rhs.commandBufferCount )
-          && ( pCommandBuffers == rhs.pCommandBuffers )
-          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
-          && ( pSignalSemaphores == rhs.pSignalSemaphores );
+      vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=( SubmitInfo const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setBlendConstants( const float blendConstants[4] ) const
     {
-      return !operator==( rhs );
+      vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  private:
-    StructureType sType;
-
-  public:
-    const void* pNext;
-    uint32_t waitSemaphoreCount;
-    const Semaphore* pWaitSemaphores;
-    const PipelineStageFlags* pWaitDstStageMask;
-    uint32_t commandBufferCount;
-    const CommandBuffer* pCommandBuffers;
-    uint32_t signalSemaphoreCount;
-    const Semaphore* pSignalSemaphores;
-  };
-  static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
-
-  class Queue
-  {
-  public:
-    Queue()
-      : m_queue(VK_NULL_HANDLE)
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Queue(VkQueue queue)
-       : m_queue(queue)
-    {}
-
-    Queue& operator=(VkQueue queue)
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
     {
-      m_queue = queue;
-      return *this;
+      vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
     }
-#endif
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator==(Queue const &rhs) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
     {
-      return m_queue == rhs.m_queue;
+      vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=(Queue const &rhs) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
     {
-      return m_queue != rhs.m_queue;
+      vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator<(Queue const &rhs) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
     {
-      return m_queue < rhs.m_queue;
+      vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
     {
-      return static_cast<Result>( vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
+      vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    ResultValueType<void>::type submit( ArrayProxy<const SubmitInfo> submits, Fence fence ) const
+    void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
     {
-      Result result = static_cast<Result>( vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
-      return createResultValue( result, "vk::Queue::submit" );
+      vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
     }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    Result waitIdle(  ) const
+    void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
     {
-      return static_cast<Result>( vkQueueWaitIdle( m_queue ) );
+      vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
     }
 #endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    ResultValueType<void>::type waitIdle() const
+    void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
     {
-      Result result = static_cast<Result>( vkQueueWaitIdle( m_queue ) );
-      return createResultValue( result, "vk::Queue::waitIdle" );
+      vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
     }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence ) const
+    void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const
     {
-      return static_cast<Result>( vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+      vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
     }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    ResultValueType<void>::type bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence ) const
+    void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets ) const
     {
-      Result result = static_cast<Result>( vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
-      return createResultValue( result, "vk::Queue::bindSparse" );
+      vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
     }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    Result presentKHR( const PresentInfoKHR* pPresentInfo ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
     {
-      return static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
+      vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    Result presentKHR( const PresentInfoKHR & presentInfo ) const
+    void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
     {
-      Result result = static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
-      return createResultValue( result, "vk::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
+      vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
     }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    explicit
-#endif
-    operator VkQueue() const
+    void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets ) const
     {
-      return m_queue;
+      vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets );
     }
 
-    explicit operator bool() const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets ) const
     {
-      return m_queue != VK_NULL_HANDLE;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      assert( buffers.size() == offsets.size() );
+#else
+      if ( buffers.size() != offsets.size() )
+      {
+        throw std::logic_error( "vk::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+      }
+#endif  // VULKAN_HPP_NO_EXCEPTIONS
+      vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data() );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
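(Illustrative usage, not part of the generated header: a sketch of the ArrayProxy overload of bindVertexBuffers above. As the hunk shows, the buffer and offset ranges must be the same length; with exceptions enabled a mismatch throws std::logic_error, otherwise it asserts. The handles and names below are placeholders.)

    #include <vulkan/vulkan.hpp>
    #include <vector>

    // Bind two vertex buffers starting at binding 0. buffers.size() must
    // equal offsets.size(), which the enhanced-mode wrapper verifies.
    void bindGeometry( vk::CommandBuffer cmd, vk::Buffer positions, vk::Buffer normals )
    {
      std::vector<vk::Buffer>     buffers = { positions, normals };
      std::vector<vk::DeviceSize> offsets = { 0, 0 };
      cmd.bindVertexBuffers( 0, buffers, offsets );
    }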
 
-    bool operator!() const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
     {
-      return m_queue == VK_NULL_HANDLE;
+      vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  private:
-    VkQueue m_queue;
-  };
-  static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
-
-  enum class PresentModeKHR
-  {
-    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
-    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
-    eFifo = VK_PRESENT_MODE_FIFO_KHR,
-    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR
-  };
-
-  enum class ColorSpaceKHR
-  {
-    eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR
-  };
-
-  struct SurfaceFormatKHR
-  {
-    operator const VkSurfaceFormatKHR&() const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
     {
-      return *reinterpret_cast<const VkSurfaceFormatKHR*>(this);
+      vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator==( SurfaceFormatKHR const& rhs ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
     {
-      return ( format == rhs.format )
-          && ( colorSpace == rhs.colorSpace );
+      vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=( SurfaceFormatKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
     {
-      return !operator==( rhs );
+      vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    Format format;
-    ColorSpaceKHR colorSpace;
-  };
-  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
-
-  enum class DisplayPlaneAlphaFlagBitsKHR
-  {
-    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
-    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
-    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
-    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
-  };
-
-  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
-
-  inline DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 )
-  {
-    return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
-  }
-
-  struct DisplayPlaneCapabilitiesKHR
-  {
-    operator const VkDisplayPlaneCapabilitiesKHR&() const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
     {
-      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>(this);
+      vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
     {
-      return ( supportedAlpha == rhs.supportedAlpha )
-          && ( minSrcPosition == rhs.minSrcPosition )
-          && ( maxSrcPosition == rhs.maxSrcPosition )
-          && ( minSrcExtent == rhs.minSrcExtent )
-          && ( maxSrcExtent == rhs.maxSrcExtent )
-          && ( minDstPosition == rhs.minDstPosition )
-          && ( maxDstPosition == rhs.maxDstPosition )
-          && ( minDstExtent == rhs.minDstExtent )
-          && ( maxDstExtent == rhs.maxDstExtent );
+      vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
     {
-      return !operator==( rhs );
+      vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DisplayPlaneAlphaFlagsKHR supportedAlpha;
-    Offset2D minSrcPosition;
-    Offset2D maxSrcPosition;
-    Extent2D minSrcExtent;
-    Extent2D maxSrcExtent;
-    Offset2D minDstPosition;
-    Offset2D maxDstPosition;
-    Extent2D minDstExtent;
-    Extent2D maxDstExtent;
-  };
-  static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
-
-  enum class CompositeAlphaFlagBitsKHR
-  {
-    eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
-    ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
-    ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
-    eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
-  };
-
-  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
-
-  inline CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 )
-  {
-    return CompositeAlphaFlagsKHR( bit0 ) | bit1;
-  }
-
-  enum class SurfaceTransformFlagBitsKHR
-  {
-    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
-    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
-    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
-    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
-    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
-    eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
-    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
-    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
-    eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
-  };
-
-  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
-
-  inline SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 )
-  {
-    return SurfaceTransformFlagsKHR( bit0 ) | bit1;
-  }
-
-  struct DisplayPropertiesKHR
-  {
-    operator const VkDisplayPropertiesKHR&() const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
     {
-      return *reinterpret_cast<const VkDisplayPropertiesKHR*>(this);
+      vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator==( DisplayPropertiesKHR const& rhs ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
     {
-      return ( display == rhs.display )
-          && ( displayName == rhs.displayName )
-          && ( physicalDimensions == rhs.physicalDimensions )
-          && ( physicalResolution == rhs.physicalResolution )
-          && ( supportedTransforms == rhs.supportedTransforms )
-          && ( planeReorderPossible == rhs.planeReorderPossible )
-          && ( persistentContent == rhs.persistentContent );
+      vkCmdDispatch( m_commandBuffer, x, y, z );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=( DisplayPropertiesKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
     {
-      return !operator==( rhs );
+      vkCmdDispatch( m_commandBuffer, x, y, z );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DisplayKHR display;
-    const char* displayName;
-    Extent2D physicalDimensions;
-    Extent2D physicalResolution;
-    SurfaceTransformFlagsKHR supportedTransforms;
-    Bool32 planeReorderPossible;
-    Bool32 persistentContent;
-  };
-  static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
+    {
+      vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
+    }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  struct DisplaySurfaceCreateInfoKHR
-  {
-    DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), DisplayModeKHR displayMode_ = DisplayModeKHR(), uint32_t planeIndex_ = 0, uint32_t planeStackIndex_ = 0, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = 0, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = Extent2D() )
-      : sType( StructureType::eDisplaySurfaceCreateInfoKHR )
-      , pNext( nullptr )
-      , flags( flags_ )
-      , displayMode( displayMode_ )
-      , planeIndex( planeIndex_ )
-      , planeStackIndex( planeStackIndex_ )
-      , transform( transform_ )
-      , globalAlpha( globalAlpha_ )
-      , alphaMode( alphaMode_ )
-      , imageExtent( imageExtent_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
     {
+      vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs )
+    void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions ) const
     {
-      memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
+      vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
     }
 
-    DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions ) const
     {
-      memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
-      return *this;
+      vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DisplaySurfaceCreateInfoKHR& setSType( StructureType sType_ )
+    void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions ) const
     {
-      sType = sType_;
-      return *this;
+      vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
     }
 
-    DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions ) const
     {
-      pNext = pNext_;
-      return *this;
+      vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ )
+    void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter ) const
     {
-      flags = flags_;
-      return *this;
+      vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
     }
 
-    DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter ) const
     {
-      displayMode = displayMode_;
-      return *this;
+      vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ )
+    void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions ) const
     {
-      planeIndex = planeIndex_;
-      return *this;
+      vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
     }
 
-    DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions ) const
     {
-      planeStackIndex = planeStackIndex_;
-      return *this;
+      vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ )
+    void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions ) const
     {
-      transform = transform_;
-      return *this;
+      vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
     }
 
-    DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions ) const
     {
-      globalAlpha = globalAlpha_;
-      return *this;
+      vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ )
+    void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData ) const
     {
-      alphaMode = alphaMode_;
-      return *this;
+      vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, dataSize, pData );
     }
 
-    DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T>
+    void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data ) const
     {
-      imageExtent = imageExtent_;
-      return *this;
+      vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
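(Illustrative usage, not part of the generated header: a sketch of the templated updateBuffer overload above, which passes data.size() * sizeof(T) bytes to vkCmdUpdateBuffer. The explicit template argument is supplied here because T cannot be deduced through the std::vector to ArrayProxy conversion; the handle and values are placeholders, and the usual vkCmdUpdateBuffer limits on size and alignment still apply.)

    #include <vulkan/vulkan.hpp>
    #include <vector>

    // Write a small array of uint32_t into dstBuffer at offset 0.
    // The wrapper forwards values.size() * sizeof(uint32_t) as the byte count.
    void uploadSmallTable( vk::CommandBuffer cmd, vk::Buffer dstBuffer )
    {
      std::vector<uint32_t> values = { 1, 2, 3, 4 };
      cmd.updateBuffer<uint32_t>( dstBuffer, 0, values );
    }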
 
-    operator const VkDisplaySurfaceCreateInfoKHR&() const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
     {
-      return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>(this);
+      vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( displayMode == rhs.displayMode )
-          && ( planeIndex == rhs.planeIndex )
-          && ( planeStackIndex == rhs.planeStackIndex )
-          && ( transform == rhs.transform )
-          && ( globalAlpha == rhs.globalAlpha )
-          && ( alphaMode == rhs.alphaMode )
-          && ( imageExtent == rhs.imageExtent );
+      vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const
+    void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
     {
-      return !operator==( rhs );
+      vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
     }
 
-  private:
-    StructureType sType;
-
-  public:
-    const void* pNext;
-    DisplaySurfaceCreateFlagsKHR flags;
-    DisplayModeKHR displayMode;
-    uint32_t planeIndex;
-    uint32_t planeStackIndex;
-    SurfaceTransformFlagBitsKHR transform;
-    float globalAlpha;
-    DisplayPlaneAlphaFlagBitsKHR alphaMode;
-    Extent2D imageExtent;
-  };
-  static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges ) const
+    {
+      vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  struct SurfaceCapabilitiesKHR
-  {
-    operator const VkSurfaceCapabilitiesKHR&() const
+    void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
     {
-      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>(this);
+      vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
     }
 
-    bool operator==( SurfaceCapabilitiesKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges ) const
     {
-      return ( minImageCount == rhs.minImageCount )
-          && ( maxImageCount == rhs.maxImageCount )
-          && ( currentExtent == rhs.currentExtent )
-          && ( minImageExtent == rhs.minImageExtent )
-          && ( maxImageExtent == rhs.maxImageExtent )
-          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
-          && ( supportedTransforms == rhs.supportedTransforms )
-          && ( currentTransform == rhs.currentTransform )
-          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
-          && ( supportedUsageFlags == rhs.supportedUsageFlags );
+      vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const
+    void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects ) const
     {
-      return !operator==( rhs );
+      vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
     }
 
-    uint32_t minImageCount;
-    uint32_t maxImageCount;
-    Extent2D currentExtent;
-    Extent2D minImageExtent;
-    Extent2D maxImageExtent;
-    uint32_t maxImageArrayLayers;
-    SurfaceTransformFlagsKHR supportedTransforms;
-    SurfaceTransformFlagBitsKHR currentTransform;
-    CompositeAlphaFlagsKHR supportedCompositeAlpha;
-    ImageUsageFlags supportedUsageFlags;
-  };
-  static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects ) const
+    {
+      vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  struct SwapchainCreateInfoKHR
-  {
-    SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), SurfaceKHR surface_ = SurfaceKHR(), uint32_t minImageCount_ = 0, Format imageFormat_ = Format::eUndefined, ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, Extent2D imageExtent_ = Extent2D(), uint32_t imageArrayLayers_ = 0, ImageUsageFlags imageUsage_ = ImageUsageFlags(), SharingMode imageSharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, Bool32 clipped_ = 0, SwapchainKHR oldSwapchain_ = SwapchainKHR() )
-      : sType( StructureType::eSwapchainCreateInfoKHR )
-      , pNext( nullptr )
-      , flags( flags_ )
-      , surface( surface_ )
-      , minImageCount( minImageCount_ )
-      , imageFormat( imageFormat_ )
-      , imageColorSpace( imageColorSpace_ )
-      , imageExtent( imageExtent_ )
-      , imageArrayLayers( imageArrayLayers_ )
-      , imageUsage( imageUsage_ )
-      , imageSharingMode( imageSharingMode_ )
-      , queueFamilyIndexCount( queueFamilyIndexCount_ )
-      , pQueueFamilyIndices( pQueueFamilyIndices_ )
-      , preTransform( preTransform_ )
-      , compositeAlpha( compositeAlpha_ )
-      , presentMode( presentMode_ )
-      , clipped( clipped_ )
-      , oldSwapchain( oldSwapchain_ )
+    void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions ) const
     {
+      vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
     }
 
-    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions ) const
     {
-      memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
+      vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setEvent( Event event, PipelineStageFlags stageMask ) const
     {
-      memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
-      return *this;
+      vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setSType( StructureType sType_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setEvent( Event event, PipelineStageFlags stageMask ) const
     {
-      sType = sType_;
-      return *this;
+      vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setPNext( const void* pNext_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void resetEvent( Event event, PipelineStageFlags stageMask ) const
     {
-      pNext = pNext_;
-      return *this;
+      vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void resetEvent( Event event, PipelineStageFlags stageMask ) const
     {
-      flags = flags_;
-      return *this;
+      vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ )
+    void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
     {
-      surface = surface_;
-      return *this;
+      vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
     }
 
-    SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
     {
-      minImageCount = minImageCount_;
-      return *this;
+      vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ )
+    void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
     {
-      imageFormat = imageFormat_;
-      return *this;
+      vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
     }
 
-    SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
     {
-      imageColorSpace = imageColorSpace_;
-      return *this;
+      vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
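(Illustrative usage, not part of the generated header: a sketch of the ArrayProxy overload of pipelineBarrier above, transitioning an image from eUndefined to eTransferDstOptimal. A single barrier lvalue and nullptr for the unused barrier lists both convert to ArrayProxy; the image handle is a placeholder, and the ImageMemoryBarrier constructor arguments follow the C struct's member order.)

    #include <vulkan/vulkan.hpp>

    // Transition `image` so it can be used as a transfer destination.
    // nullptr stands in for the empty memory/buffer barrier lists.
    void prepareForTransfer( vk::CommandBuffer cmd, vk::Image image )
    {
      vk::ImageMemoryBarrier barrier( vk::AccessFlags(),                   // srcAccessMask
                                      vk::AccessFlagBits::eTransferWrite,  // dstAccessMask
                                      vk::ImageLayout::eUndefined,
                                      vk::ImageLayout::eTransferDstOptimal,
                                      VK_QUEUE_FAMILY_IGNORED,
                                      VK_QUEUE_FAMILY_IGNORED,
                                      image,
                                      vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
      cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
                           vk::PipelineStageFlagBits::eTransfer,
                           vk::DependencyFlags(),
                           nullptr, nullptr, barrier );
    }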
 
-    SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
     {
-      imageExtent = imageExtent_;
-      return *this;
+      vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
     {
-      imageArrayLayers = imageArrayLayers_;
-      return *this;
+      vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void endQuery( QueryPool queryPool, uint32_t query ) const
     {
-      imageUsage = imageUsage_;
-      return *this;
+      vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void endQuery( QueryPool queryPool, uint32_t query ) const
     {
-      imageSharingMode = imageSharingMode_;
-      return *this;
+      vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
     {
-      queueFamilyIndexCount = queueFamilyIndexCount_;
-      return *this;
+      vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
     {
-      pQueueFamilyIndices = pQueueFamilyIndices_;
-      return *this;
+      vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
     {
-      preTransform = preTransform_;
-      return *this;
+      vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
     {
-      compositeAlpha = compositeAlpha_;
-      return *this;
+      vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
     {
-      presentMode = presentMode_;
-      return *this;
+      vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
     {
-      clipped = clipped_;
-      return *this;
+      vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ )
+    void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const
     {
-      oldSwapchain = oldSwapchain_;
-      return *this;
+      vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
     }
 
-    operator const VkSwapchainCreateInfoKHR&() const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T>
+    void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values ) const
     {
-      return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>(this);
+      vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
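(Illustrative usage, not part of the generated header: a sketch of the templated pushConstants overload above, pushing one small struct; the byte count becomes values.size() * sizeof(T), i.e. sizeof(PushData) here. The struct, layout handle, and stage flags are placeholders; the pipeline layout's push-constant range is assumed to cover them.)

    #include <vulkan/vulkan.hpp>

    struct PushData
    {
      float scale[2];
      float offset[2];
    };

    // Push a 16-byte constant block visible to the vertex stage.
    void pushTransform( vk::CommandBuffer cmd, vk::PipelineLayout layout, const PushData & data )
    {
      cmd.pushConstants<PushData>( layout, vk::ShaderStageFlagBits::eVertex, 0, data );
    }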
 
-    bool operator==( SwapchainCreateInfoKHR const& rhs ) const
+    void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents ) const
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( surface == rhs.surface )
-          && ( minImageCount == rhs.minImageCount )
-          && ( imageFormat == rhs.imageFormat )
-          && ( imageColorSpace == rhs.imageColorSpace )
-          && ( imageExtent == rhs.imageExtent )
-          && ( imageArrayLayers == rhs.imageArrayLayers )
-          && ( imageUsage == rhs.imageUsage )
-          && ( imageSharingMode == rhs.imageSharingMode )
-          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
-          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
-          && ( preTransform == rhs.preTransform )
-          && ( compositeAlpha == rhs.compositeAlpha )
-          && ( presentMode == rhs.presentMode )
-          && ( clipped == rhs.clipped )
-          && ( oldSwapchain == rhs.oldSwapchain );
+      vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
     }
 
-    bool operator!=( SwapchainCreateInfoKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents ) const
     {
-      return !operator==( rhs );
+      vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  private:
-    StructureType sType;
-
-  public:
-    const void* pNext;
-    SwapchainCreateFlagsKHR flags;
-    SurfaceKHR surface;
-    uint32_t minImageCount;
-    Format imageFormat;
-    ColorSpaceKHR imageColorSpace;
-    Extent2D imageExtent;
-    uint32_t imageArrayLayers;
-    ImageUsageFlags imageUsage;
-    SharingMode imageSharingMode;
-    uint32_t queueFamilyIndexCount;
-    const uint32_t* pQueueFamilyIndices;
-    SurfaceTransformFlagBitsKHR preTransform;
-    CompositeAlphaFlagBitsKHR compositeAlpha;
-    PresentModeKHR presentMode;
-    Bool32 clipped;
-    SwapchainKHR oldSwapchain;
-  };
-  static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
-
-  enum class DebugReportFlagBitsEXT
-  {
-    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
-    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
-    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
-    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
-    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
-  };
-
-  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
-
-  inline DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 )
-  {
-    return DebugReportFlagsEXT( bit0 ) | bit1;
-  }
-
-  struct DebugReportCallbackCreateInfoEXT
-  {
-    DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, void* pUserData_ = nullptr )
-      : sType( StructureType::eDebugReportCallbackCreateInfoEXT )
-      , pNext( nullptr )
-      , flags( flags_ )
-      , pfnCallback( pfnCallback_ )
-      , pUserData( pUserData_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void nextSubpass( SubpassContents contents ) const
     {
+      vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void nextSubpass( SubpassContents contents ) const
     {
-      memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
+      vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void endRenderPass(  ) const
     {
-      memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
-      return *this;
+      vkCmdEndRenderPass( m_commandBuffer );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DebugReportCallbackCreateInfoEXT& setSType( StructureType sType_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void endRenderPass() const
     {
-      sType = sType_;
-      return *this;
+      vkCmdEndRenderPass( m_commandBuffer );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ )
+    void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
     {
-      pNext = pNext_;
-      return *this;
+      vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
     }
 
-    DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void executeCommands( ArrayProxy<const CommandBuffer> commandBuffers ) const
     {
-      flags = flags_;
-      return *this;
+      vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ )
+    void debugMarkerBeginEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
     {
-      pfnCallback = pfnCallback_;
-      return *this;
+      vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
     }
 
-    DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    DebugMarkerMarkerInfoEXT debugMarkerBeginEXT() const
     {
-      pUserData = pUserData_;
-      return *this;
+      DebugMarkerMarkerInfoEXT markerInfo;
+      vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
+      return markerInfo;
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    operator const VkDebugReportCallbackCreateInfoEXT&() const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void debugMarkerEndEXT(  ) const
     {
-      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>(this);
+      vkCmdDebugMarkerEndEXT( m_commandBuffer );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void debugMarkerEndEXT() const
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pfnCallback == rhs.pfnCallback )
-          && ( pUserData == rhs.pUserData );
+      vkCmdDebugMarkerEndEXT( m_commandBuffer );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const
+    void debugMarkerInsertEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
     {
-      return !operator==( rhs );
+      vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
     }
 
-  private:
-    StructureType sType;
-
-  public:
-    const void* pNext;
-    DebugReportFlagsEXT flags;
-    PFN_vkDebugReportCallbackEXT pfnCallback;
-    void* pUserData;
-  };
-  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  enum class DebugReportObjectTypeEXT
-  {
-    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
-    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
-    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
-    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
-    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
-    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
-    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
-    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
-    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
-    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
-    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
-    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
-    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
-    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
-    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
-    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
-    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
-    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
-    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
-    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
-    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
-    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
-    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
-    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
-    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
-    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
-    eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
-    eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
-    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT
-  };
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    DebugMarkerMarkerInfoEXT debugMarkerInsertEXT() const
+    {
+      DebugMarkerMarkerInfoEXT markerInfo;
+      vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
+      return markerInfo;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  struct DebugMarkerObjectNameInfoEXT
-  {
-    DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, const char* pObjectName_ = nullptr )
-      : sType( StructureType::eDebugMarkerObjectNameInfoEXT )
-      , pNext( nullptr )
-      , objectType( objectType_ )
-      , object( object_ )
-      , pObjectName( pObjectName_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
     {
+      vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
     {
-      memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
+      vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
     {
-      memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
-      return *this;
+      vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
     }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DebugMarkerObjectNameInfoEXT& setSType( StructureType sType_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
     {
-      sType = sType_;
-      return *this;
+      vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ )
+    void processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const
     {
-      pNext = pNext_;
-      return *this;
+      vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
     }
 
-    DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo ) const
     {
-      objectType = objectType_;
-      return *this;
+      vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ )
+    void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const
     {
-      object = object_;
-      return *this;
+      vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
     }
 
-    DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo ) const
     {
-      pObjectName = pObjectName_;
-      return *this;
+      vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    operator const VkDebugMarkerObjectNameInfoEXT&() const
+#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    explicit
+#endif
+    operator VkCommandBuffer() const
     {
-      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>(this);
+      return m_commandBuffer;
     }
 
-    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
+    explicit operator bool() const
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( object == rhs.object )
-          && ( pObjectName == rhs.pObjectName );
+      return m_commandBuffer != VK_NULL_HANDLE;
     }
 
-    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
+    bool operator!() const
     {
-      return !operator==( rhs );
+      return m_commandBuffer == VK_NULL_HANDLE;
     }
 
   private:
-    StructureType sType;
-
-  public:
-    const void* pNext;
-    DebugReportObjectTypeEXT objectType;
-    uint64_t object;
-    const char* pObjectName;
+    VkCommandBuffer m_commandBuffer;
   };
-  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
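
A quick usage note for the enhanced-mode overloads above: the ArrayProxy variant of executeCommands forwards size()/data() of any contiguous container, so a batch of secondary command buffers can be replayed in one call. A minimal sketch, assuming the primary buffer and the secondaries were allocated and recorded elsewhere (the names cmd and secondaries are placeholders):

    // Sketch only: 'cmd' is a vk::CommandBuffer in the recording state and
    // 'secondaries' is a std::vector<vk::CommandBuffer> recorded elsewhere.
    void replaySecondaries( vk::CommandBuffer cmd,
                            std::vector<vk::CommandBuffer> const & secondaries )
    {
      cmd.executeCommands( secondaries );  // ArrayProxy overload: no count/pointer pair needed
    }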
 
-  struct DebugMarkerObjectTagInfoEXT
+  struct SubmitInfo
   {
-    DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr )
-      : sType( StructureType::eDebugMarkerObjectTagInfoEXT )
+    SubmitInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, const PipelineStageFlags* pWaitDstStageMask_ = nullptr, uint32_t commandBufferCount_ = 0, const CommandBuffer* pCommandBuffers_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
+      : sType( StructureType::eSubmitInfo )
       , pNext( nullptr )
-      , objectType( objectType_ )
-      , object( object_ )
-      , tagName( tagName_ )
-      , tagSize( tagSize_ )
-      , pTag( pTag_ )
+      , waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , pWaitDstStageMask( pWaitDstStageMask_ )
+      , commandBufferCount( commandBufferCount_ )
+      , pCommandBuffers( pCommandBuffers_ )
+      , signalSemaphoreCount( signalSemaphoreCount_ )
+      , pSignalSemaphores( pSignalSemaphores_ )
     {
     }
 
-    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs )
+    SubmitInfo( VkSubmitInfo const & rhs )
     {
-      memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
+      memcpy( this, &rhs, sizeof(SubmitInfo) );
     }
 
-    DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs )
+    SubmitInfo& operator=( VkSubmitInfo const & rhs )
     {
-      memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
+      memcpy( this, &rhs, sizeof(SubmitInfo) );
       return *this;
     }
 
-    DebugMarkerObjectTagInfoEXT& setSType( StructureType sType_ )
+    SubmitInfo& setSType( StructureType sType_ )
     {
       sType = sType_;
       return *this;
     }
 
-    DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ )
+    SubmitInfo& setPNext( const void* pNext_ )
     {
       pNext = pNext_;
       return *this;
     }
 
-    DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+    SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
     {
-      objectType = objectType_;
+      waitSemaphoreCount = waitSemaphoreCount_;
       return *this;
     }
 
-    DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ )
+    SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
     {
-      object = object_;
+      pWaitSemaphores = pWaitSemaphores_;
       return *this;
     }
 
-    DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ )
+    SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
     {
-      tagName = tagName_;
+      pWaitDstStageMask = pWaitDstStageMask_;
       return *this;
     }
 
-    DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ )
+    SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
     {
-      tagSize = tagSize_;
+      commandBufferCount = commandBufferCount_;
       return *this;
     }
 
-    DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ )
+    SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
     {
-      pTag = pTag_;
+      pCommandBuffers = pCommandBuffers_;
       return *this;
     }
 
-    operator const VkDebugMarkerObjectTagInfoEXT&() const
+    SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
     {
-      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>(this);
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
     }
 
-    bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const
+    SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
+    {
+      pSignalSemaphores = pSignalSemaphores_;
+      return *this;
+    }
+
+    operator const VkSubmitInfo&() const
+    {
+      return *reinterpret_cast<const VkSubmitInfo*>(this);
+    }
+
+    bool operator==( SubmitInfo const& rhs ) const
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( object == rhs.object )
-          && ( tagName == rhs.tagName )
-          && ( tagSize == rhs.tagSize )
-          && ( pTag == rhs.pTag );
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBuffers == rhs.pCommandBuffers )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphores == rhs.pSignalSemaphores );
     }
 
-    bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const
+    bool operator!=( SubmitInfo const& rhs ) const
     {
       return !operator==( rhs );
     }
@@ -15571,104 +17635,126 @@ namespace vk
 
   public:
     const void* pNext;
-    DebugReportObjectTypeEXT objectType;
-    uint64_t object;
-    uint64_t tagName;
-    size_t tagSize;
-    const void* pTag;
+    uint32_t waitSemaphoreCount;
+    const Semaphore* pWaitSemaphores;
+    const PipelineStageFlags* pWaitDstStageMask;
+    uint32_t commandBufferCount;
+    const CommandBuffer* pCommandBuffers;
+    uint32_t signalSemaphoreCount;
+    const Semaphore* pSignalSemaphores;
   };
-  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
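
The chainable setters above make it possible to build a SubmitInfo inline; the wrapper is layout-compatible with VkSubmitInfo (see the static_assert), so its address can also be handed straight to the C API. A sketch, assuming the semaphores and the command buffer already exist:

    // Sketch: 'waitSemaphore', 'signalSemaphore' and 'cmd' are assumed to exist.
    vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
    vk::SubmitInfo submitInfo = vk::SubmitInfo()
      .setWaitSemaphoreCount( 1 )
      .setPWaitSemaphores( &waitSemaphore )
      .setPWaitDstStageMask( &waitStage )
      .setCommandBufferCount( 1 )
      .setPCommandBuffers( &cmd )
      .setSignalSemaphoreCount( 1 )
      .setPSignalSemaphores( &signalSemaphore );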
 
-  enum class DebugReportErrorEXT
+  class Queue
   {
-    eNone = VK_DEBUG_REPORT_ERROR_NONE_EXT,
-    eCallbackRef = VK_DEBUG_REPORT_ERROR_CALLBACK_REF_EXT
-  };
+  public:
+    Queue()
+      : m_queue(VK_NULL_HANDLE)
+    {}
 
-  enum class RasterizationOrderAMD
-  {
-    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
-    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
-  };
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Queue(VkQueue queue)
+       : m_queue(queue)
+    {}
 
-  struct PipelineRasterizationStateRasterizationOrderAMD
-  {
-    PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
-      : sType( StructureType::ePipelineRasterizationStateRasterizationOrderAMD )
-      , pNext( nullptr )
-      , rasterizationOrder( rasterizationOrder_ )
+    Queue& operator=(VkQueue queue)
+    {
+      m_queue = queue;
+      return *this;
+    }
+#endif
+
+    bool operator==(Queue const &rhs) const
     {
+      return m_queue == rhs.m_queue;
     }
 
-    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    bool operator!=(Queue const &rhs) const
     {
-      memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
+      return m_queue != rhs.m_queue;
     }
 
-    PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    bool operator<(Queue const &rhs) const
     {
-      memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
-      return *this;
+      return m_queue < rhs.m_queue;
     }
 
-    PipelineRasterizationStateRasterizationOrderAMD& setSType( StructureType sType_ )
+    Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence ) const
+    {
+      return static_cast<Result>( vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
+    }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type submit( ArrayProxy<const SubmitInfo> submits, Fence fence ) const
+    {
+      Result result = static_cast<Result>( vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
+      return createResultValue( result, "vk::Queue::submit" );
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result waitIdle(  ) const
+    {
+      return static_cast<Result>( vkQueueWaitIdle( m_queue ) );
+    }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type waitIdle() const
+    {
+      Result result = static_cast<Result>( vkQueueWaitIdle( m_queue ) );
+      return createResultValue( result, "vk::Queue::waitIdle" );
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence ) const
+    {
+      return static_cast<Result>( vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+    }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence ) const
     {
-      sType = sType_;
-      return *this;
+      Result result = static_cast<Result>( vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
+      return createResultValue( result, "vk::Queue::bindSparse" );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
+    Result presentKHR( const PresentInfoKHR* pPresentInfo ) const
     {
-      pNext = pNext_;
-      return *this;
+      return static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
     }
 
-    PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result presentKHR( const PresentInfoKHR & presentInfo ) const
     {
-      rasterizationOrder = rasterizationOrder_;
-      return *this;
+      Result result = static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
+      return createResultValue( result, "vk::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    operator const VkPipelineRasterizationStateRasterizationOrderAMD&() const
+#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    explicit
+#endif
+    operator VkQueue() const
     {
-      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
+      return m_queue;
     }
 
-    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    explicit operator bool() const
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( rasterizationOrder == rhs.rasterizationOrder );
+      return m_queue != VK_NULL_HANDLE;
     }
 
-    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    bool operator!() const
     {
-      return !operator==( rhs );
+      return m_queue == VK_NULL_HANDLE;
     }
 
   private:
-    StructureType sType;
-
-  public:
-    const void* pNext;
-    RasterizationOrderAMD rasterizationOrder;
-  };
-  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
-
-  enum class ExternalMemoryHandleTypeFlagBitsNV
-  {
-    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
-    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
-    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
-    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+    VkQueue m_queue;
   };
-
-  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
-
-  inline ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 )
-  {
-    return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
-  }
+  static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
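
On the submission side, the enhanced-mode overloads return ResultValueType<void>::type, i.e. they report failure through createResultValue (an exception, or an assert with VULKAN_HPP_NO_EXCEPTIONS) instead of handing back a Result. A sketch continuing the SubmitInfo example above, assuming the queue and fence were obtained elsewhere:

    // Sketch: 'queue' and 'fence' are obtained/created elsewhere; 'submitInfo' as above.
    queue.submit( submitInfo, fence );  // ArrayProxy also accepts a single element by reference
    queue.waitIdle();                   // error handling via createResultValue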
 
   class Device
   {
@@ -16827,315 +18913,122 @@ namespace vk
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    explicit
-#endif
-    operator VkDevice() const
-    {
-      return m_device;
-    }
-
-    explicit operator bool() const
-    {
-      return m_device != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_device == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDevice m_device;
-  };
-  static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
-
-  struct ExternalMemoryImageCreateInfoNV
-  {
-    ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
-      : sType( StructureType::eExternalMemoryImageCreateInfoNV )
-      , pNext( nullptr )
-      , handleTypes( handleTypes_ )
-    {
-    }
-
-    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
-    }
-
-    ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
-      return *this;
-    }
-
-    ExternalMemoryImageCreateInfoNV& setSType( StructureType sType_ )
-    {
-      sType = sType_;
-      return *this;
-    }
-
-    ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
-    {
-      handleTypes = handleTypes_;
-      return *this;
-    }
-
-    operator const VkExternalMemoryImageCreateInfoNV&() const
-    {
-      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>(this);
-    }
-
-    bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleTypes == rhs.handleTypes );
-    }
-
-    bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType;
-
-  public:
-    const void* pNext;
-    ExternalMemoryHandleTypeFlagsNV handleTypes;
-  };
-  static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
-
-  struct ExportMemoryAllocateInfoNV
-  {
-    ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
-      : sType( StructureType::eExportMemoryAllocateInfoNV )
-      , pNext( nullptr )
-      , handleTypes( handleTypes_ )
-    {
-    }
-
-    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
-    }
-
-    ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
-      return *this;
-    }
-
-    ExportMemoryAllocateInfoNV& setSType( StructureType sType_ )
-    {
-      sType = sType_;
-      return *this;
-    }
-
-    ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
-    {
-      handleTypes = handleTypes_;
-      return *this;
-    }
-
-    operator const VkExportMemoryAllocateInfoNV&() const
-    {
-      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>(this);
-    }
-
-    bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleTypes == rhs.handleTypes );
-    }
-
-    bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType;
-
-  public:
-    const void* pNext;
-    ExternalMemoryHandleTypeFlagsNV handleTypes;
-  };
-  static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct ImportMemoryWin32HandleInfoNV
-  {
-    ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), HANDLE handle_ = 0 )
-      : sType( StructureType::eImportMemoryWin32HandleInfoNV )
-      , pNext( nullptr )
-      , handleType( handleType_ )
-      , handle( handle_ )
-    {
-    }
-
-    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
-    }
-
-    ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
-      return *this;
-    }
-
-    ImportMemoryWin32HandleInfoNV& setSType( StructureType sType_ )
-    {
-      sType = sType_;
-      return *this;
-    }
-
-    ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ )
-    {
-      handle = handle_;
-      return *this;
-    }
-
-    operator const VkImportMemoryWin32HandleInfoNV&() const
+    Result createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const
     {
-      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>(this);
+      return static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
     }
 
-    bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleType == rhs.handleType )
-          && ( handle == rhs.handle );
+      IndirectCommandsLayoutNVX indirectCommandsLayout;
+      Result result = static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
+      return createResultValue( result, indirectCommandsLayout, "vk::Device::createIndirectCommandsLayoutNVX" );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const
+    void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator ) const
     {
-      return !operator==( rhs );
+      vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
     }
 
-  private:
-    StructureType sType;
-
-  public:
-    const void* pNext;
-    ExternalMemoryHandleTypeFlagsNV handleType;
-    HANDLE handle;
-  };
-  static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-  enum class ExternalMemoryFeatureFlagBitsNV
-  {
-    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
-    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
-    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
-  };
-
-  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const
+    {
+      vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  inline ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 )
-  {
-    return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
-  }
+    Result createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable ) const
+    {
+      return static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
+    }
 
-  struct ExternalImageFormatPropertiesNV
-  {
-    ExternalImageFormatPropertiesNV( ImageFormatProperties imageFormatProperties_ = ImageFormatProperties(), ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = ExternalMemoryFeatureFlagsNV(), ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = ExternalMemoryHandleTypeFlagsNV(), ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
-      : imageFormatProperties( imageFormatProperties_ )
-      , externalMemoryFeatures( externalMemoryFeatures_ )
-      , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
-      , compatibleHandleTypes( compatibleHandleTypes_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
     {
+      ObjectTableNVX objectTable;
+      Result result = static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
+      return createResultValue( result, objectTable, "vk::Device::createObjectTableNVX" );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs )
+    void destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator ) const
     {
-      memcpy( this, &rhs, sizeof(ExternalImageFormatPropertiesNV) );
+      vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
     }
 
-    ExternalImageFormatPropertiesNV& operator=( VkExternalImageFormatPropertiesNV const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr ) const
     {
-      memcpy( this, &rhs, sizeof(ExternalImageFormatPropertiesNV) );
-      return *this;
+      vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    ExternalImageFormatPropertiesNV& setImageFormatProperties( ImageFormatProperties imageFormatProperties_ )
+    Result registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const
     {
-      imageFormatProperties = imageFormatProperties_;
-      return *this;
+      return static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
     }
 
-    ExternalImageFormatPropertiesNV& setExternalMemoryFeatures( ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices ) const
     {
-      externalMemoryFeatures = externalMemoryFeatures_;
-      return *this;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      assert( pObjectTableEntries.size() == objectIndices.size() );
+#else
+      if ( pObjectTableEntries.size() != objectIndices.size() )
+      {
+        throw std::logic_error( "vk::Device::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" );
+      }
+#endif  // VULKAN_HPP_NO_EXCEPTIONS
+      Result result = static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), pObjectTableEntries.size() , reinterpret_cast<const VkObjectTableEntryNVX* const*>( pObjectTableEntries.data() ), objectIndices.data() ) );
+      return createResultValue( result, "vk::Device::registerObjectsNVX" );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    ExternalImageFormatPropertiesNV& setExportFromImportedHandleTypes( ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ )
+    Result unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const
     {
-      exportFromImportedHandleTypes = exportFromImportedHandleTypes_;
-      return *this;
+      return static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
     }
 
-    ExternalImageFormatPropertiesNV& setCompatibleHandleTypes( ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices ) const
     {
-      compatibleHandleTypes = compatibleHandleTypes_;
-      return *this;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      assert( objectEntryTypes.size() == objectIndices.size() );
+#else
+      if ( objectEntryTypes.size() != objectIndices.size() )
+      {
+        throw std::logic_error( "vk::Device::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
+      }
+#endif  // VULKAN_HPP_NO_EXCEPTIONS
+      Result result = static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
+      return createResultValue( result, "vk::Device::unregisterObjectsNVX" );
     }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    operator const VkExternalImageFormatPropertiesNV&() const
+#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    explicit
+#endif
+    operator VkDevice() const
     {
-      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>(this);
+      return m_device;
     }
 
-    bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const
+    explicit operator bool() const
     {
-      return ( imageFormatProperties == rhs.imageFormatProperties )
-          && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
-          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
-          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+      return m_device != VK_NULL_HANDLE;
     }
 
-    bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const
+    bool operator!() const
     {
-      return !operator==( rhs );
+      return m_device == VK_NULL_HANDLE;
     }
 
-    ImageFormatProperties imageFormatProperties;
-    ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
-    ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
-    ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
+  private:
+    VkDevice m_device;
   };
-  static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
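
The Device wrapper now carries the VK_NVX_device_generated_commands entry points. The enhanced-mode create functions return the handle directly, and registerObjectsNVX checks that the entry and index arrays have the same length (assert or std::logic_error, as shown above). A hedged sketch of the lifecycle, assuming both create-info structures and the table contents are filled in elsewhere:

    // Sketch: 'device', 'layoutCreateInfo', 'tableCreateInfo', 'entries'
    // (an array of const vk::ObjectTableEntryNVX*), 'entryTypes' and 'indices'
    // are placeholders prepared elsewhere by the application.
    vk::IndirectCommandsLayoutNVX layout = device.createIndirectCommandsLayoutNVX( layoutCreateInfo );
    vk::ObjectTableNVX table = device.createObjectTableNVX( tableCreateInfo );
    device.registerObjectsNVX( table, entries, indices );        // sizes must match
    // ... record and submit work that consumes the table ...
    device.unregisterObjectsNVX( table, entryTypes, indices );
    device.destroyObjectTableNVX( table );
    device.destroyIndirectCommandsLayoutNVX( layout );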
 
   class PhysicalDevice
   {
@@ -17659,6 +19552,18 @@ namespace vk
     }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+    void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits ) const
+    {
+      vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
+    }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, DeviceGeneratedCommandsLimitsNVX & limits ) const
+    {
+      vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
 #if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
     explicit
 #endif
@@ -17935,6 +19840,14 @@ namespace vk
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
     void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message ) const
     {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      assert( layerPrefix.size() == message.size() );
+#else
+      if ( layerPrefix.size() != message.size() )
+      {
+        throw std::logic_error( "vk::Instance::debugReportMessageEXT: layerPrefix.size() != message.size()" );
+      }
+#endif  // VULKAN_HPP_NO_EXCEPTIONS
       vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
     }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -17962,70 +19875,129 @@ namespace vk
   };
   static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
 
-  enum class ValidationCheckEXT
-  {
-    eAll = VK_VALIDATION_CHECK_ALL_EXT
-  };
-
-  struct ValidationFlagsEXT
+  struct CmdProcessCommandsInfoNVX
   {
-    ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, ValidationCheckEXT* pDisabledValidationChecks_ = nullptr )
-      : sType( StructureType::eValidationFlagsEXT )
+    CmdProcessCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t indirectCommandsTokenCount_ = 0, const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, uint32_t maxSequencesCount_ = 0, CommandBuffer targetCommandBuffer_ = CommandBuffer(), Buffer sequencesCountBuffer_ = Buffer(), DeviceSize sequencesCountOffset_ = 0, Buffer sequencesIndexBuffer_ = Buffer(), DeviceSize sequencesIndexOffset_ = 0 )
+      : sType( StructureType::eCmdProcessCommandsInfoNVX )
       , pNext( nullptr )
-      , disabledValidationCheckCount( disabledValidationCheckCount_ )
-      , pDisabledValidationChecks( pDisabledValidationChecks_ )
+      , objectTable( objectTable_ )
+      , indirectCommandsLayout( indirectCommandsLayout_ )
+      , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
+      , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
+      , maxSequencesCount( maxSequencesCount_ )
+      , targetCommandBuffer( targetCommandBuffer_ )
+      , sequencesCountBuffer( sequencesCountBuffer_ )
+      , sequencesCountOffset( sequencesCountOffset_ )
+      , sequencesIndexBuffer( sequencesIndexBuffer_ )
+      , sequencesIndexOffset( sequencesIndexOffset_ )
     {
     }
 
-    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs )
+    CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs )
     {
-      memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
+      memcpy( this, &rhs, sizeof(CmdProcessCommandsInfoNVX) );
     }
 
-    ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs )
+    CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs )
     {
-      memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
+      memcpy( this, &rhs, sizeof(CmdProcessCommandsInfoNVX) );
       return *this;
     }
 
-    ValidationFlagsEXT& setSType( StructureType sType_ )
+    CmdProcessCommandsInfoNVX& setSType( StructureType sType_ )
     {
       sType = sType_;
       return *this;
     }
 
-    ValidationFlagsEXT& setPNext( const void* pNext_ )
+    CmdProcessCommandsInfoNVX& setPNext( const void* pNext_ )
     {
       pNext = pNext_;
       return *this;
     }
 
-    ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ )
+    CmdProcessCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
     {
-      disabledValidationCheckCount = disabledValidationCheckCount_;
+      objectTable = objectTable_;
       return *this;
     }
 
-    ValidationFlagsEXT& setPDisabledValidationChecks( ValidationCheckEXT* pDisabledValidationChecks_ )
+    CmdProcessCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
     {
-      pDisabledValidationChecks = pDisabledValidationChecks_;
+      indirectCommandsLayout = indirectCommandsLayout_;
       return *this;
     }
 
-    operator const VkValidationFlagsEXT&() const
+    CmdProcessCommandsInfoNVX& setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ )
     {
-      return *reinterpret_cast<const VkValidationFlagsEXT*>(this);
+      indirectCommandsTokenCount = indirectCommandsTokenCount_;
+      return *this;
     }
 
-    bool operator==( ValidationFlagsEXT const& rhs ) const
+    CmdProcessCommandsInfoNVX& setPIndirectCommandsTokens( const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ )
+    {
+      pIndirectCommandsTokens = pIndirectCommandsTokens_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
+    {
+      maxSequencesCount = maxSequencesCount_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setTargetCommandBuffer( CommandBuffer targetCommandBuffer_ )
+    {
+      targetCommandBuffer = targetCommandBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setSequencesCountBuffer( Buffer sequencesCountBuffer_ )
+    {
+      sequencesCountBuffer = sequencesCountBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setSequencesCountOffset( DeviceSize sequencesCountOffset_ )
+    {
+      sequencesCountOffset = sequencesCountOffset_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ )
+    {
+      sequencesIndexBuffer = sequencesIndexBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ )
+    {
+      sequencesIndexOffset = sequencesIndexOffset_;
+      return *this;
+    }
+
+    operator const VkCmdProcessCommandsInfoNVX&() const
+    {
+      return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>(this);
+    }
+
+    bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
-          && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+          && ( objectTable == rhs.objectTable )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
+          && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
+          && ( maxSequencesCount == rhs.maxSequencesCount )
+          && ( targetCommandBuffer == rhs.targetCommandBuffer )
+          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+          && ( sequencesCountOffset == rhs.sequencesCountOffset )
+          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
     }
 
-    bool operator!=( ValidationFlagsEXT const& rhs ) const
+    bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const
     {
       return !operator==( rhs );
     }
@@ -18035,18 +20007,26 @@ namespace vk
 
   public:
     const void* pNext;
-    uint32_t disabledValidationCheckCount;
-    ValidationCheckEXT* pDisabledValidationChecks;
+    ObjectTableNVX objectTable;
+    IndirectCommandsLayoutNVX indirectCommandsLayout;
+    uint32_t indirectCommandsTokenCount;
+    const IndirectCommandsTokenNVX* pIndirectCommandsTokens;
+    uint32_t maxSequencesCount;
+    CommandBuffer targetCommandBuffer;
+    Buffer sequencesCountBuffer;
+    DeviceSize sequencesCountOffset;
+    Buffer sequencesIndexBuffer;
+    DeviceSize sequencesIndexOffset;
   };
-  static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+  static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
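
Tying the pieces together: the generated-commands path queries the device's features and limits through the new PhysicalDevice member, builds a CmdProcessCommandsInfoNVX with the setters above, and records it into a command buffer. A sketch only; the table, layout, token array and maximum sequence count are assumed to come from elsewhere:

    // Sketch: 'physicalDevice', 'cmd', 'table', 'layout', 'tokens', 'tokenCount'
    // and 'maxSequences' are placeholders supplied by the application.
    vk::DeviceGeneratedCommandsFeaturesNVX features;
    vk::DeviceGeneratedCommandsLimitsNVX limits;
    physicalDevice.getGeneratedCommandsPropertiesNVX( features, limits );

    vk::CmdProcessCommandsInfoNVX processInfo = vk::CmdProcessCommandsInfoNVX()
      .setObjectTable( table )
      .setIndirectCommandsLayout( layout )
      .setIndirectCommandsTokenCount( tokenCount )
      .setPIndirectCommandsTokens( tokens )
      .setMaxSequencesCount( maxSequences );
    cmd.processCommandsNVX( processInfo );  // reference overload from the CommandBuffer wrapper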
 
-  inline Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance )
+  VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance )
   {
     return static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  inline ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr )
+  VULKAN_HPP_INLINE ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr )
   {
     Instance instance;
     Result result = static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkInstance*>( &instance ) ) );
@@ -18054,455 +20034,401 @@ namespace vk
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
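
The free functions keep their behaviour; only the inline keyword is now spelled through the VULKAN_HPP_INLINE macro (presumably so users can override it). In enhanced mode, instance creation therefore remains a one-liner; a sketch assuming the create info is assembled elsewhere:

    // Sketch: 'instanceCreateInfo' is a vk::InstanceCreateInfo filled in elsewhere.
    vk::Instance instance = vk::createInstance( instanceCreateInfo );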
 
-  inline Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties )
-  {
-    return static_cast<Result>( vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
-  }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator = std::allocator<LayerProperties>>
-  typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties()
-  {
-    std::vector<LayerProperties,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    assert( propertyCount <= properties.size() ); 
-    properties.resize( propertyCount ); 
-    return createResultValue( result, properties, "vk::enumerateInstanceLayerProperties" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  inline Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties )
-  {
-    return static_cast<Result>( vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
-  }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator = std::allocator<ExtensionProperties>>
-  typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr )
-  {
-    std::vector<ExtensionProperties,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    assert( propertyCount <= properties.size() ); 
-    properties.resize( propertyCount ); 
-    return createResultValue( result, properties, "vk::enumerateInstanceExtensionProperties" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  inline std::string to_string(FramebufferCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(FramebufferCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(QueryPoolCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(QueryPoolCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(RenderPassCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(RenderPassCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(SamplerCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(SamplerCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineLayoutCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineLayoutCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineCacheCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineCacheCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineDepthStencilStateCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineDepthStencilStateCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineDynamicStateCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineDynamicStateCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineColorBlendStateCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineColorBlendStateCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineMultisampleStateCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineMultisampleStateCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineRasterizationStateCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineRasterizationStateCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineViewportStateCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineViewportStateCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineTessellationStateCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineTessellationStateCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineInputAssemblyStateCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineInputAssemblyStateCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineVertexInputStateCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineVertexInputStateCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(PipelineShaderStageCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(PipelineShaderStageCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(DescriptorSetLayoutCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(DescriptorSetLayoutCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(BufferViewCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(BufferViewCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(InstanceCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(InstanceCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(DeviceCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(DeviceCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(DeviceQueueCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(DeviceQueueCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(ImageViewCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(ImageViewCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(SemaphoreCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(SemaphoreCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(ShaderModuleCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(ShaderModuleCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(EventCreateFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(EventCreateFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(EventCreateFlags)
+  VULKAN_HPP_INLINE std::string to_string(EventCreateFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(MemoryMapFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(MemoryMapFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(MemoryMapFlags)
+  VULKAN_HPP_INLINE std::string to_string(MemoryMapFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(SubpassDescriptionFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(SubpassDescriptionFlags)
+  VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(DescriptorPoolResetFlagBits)
+  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlagBits)
   {
     return "(void)";
   }
 
-  inline std::string to_string(DescriptorPoolResetFlags)
+  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlags)
   {
     return "{}";
   }
 
-  inline std::string to_string(SwapchainCreateFlagBitsKHR)
+  VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagBitsKHR)
   {
     return "(void)";
   }
 
-  inline std::string to_string(SwapchainCreateFlagsKHR)
+  VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagsKHR)
   {
     return "{}";
   }
 
-  inline std::string to_string(DisplayModeCreateFlagBitsKHR)
+  VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagBitsKHR)
   {
     return "(void)";
   }
 
-  inline std::string to_string(DisplayModeCreateFlagsKHR)
+  VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagsKHR)
   {
     return "{}";
   }
 
-  inline std::string to_string(DisplaySurfaceCreateFlagBitsKHR)
+  VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagBitsKHR)
   {
     return "(void)";
   }
 
-  inline std::string to_string(DisplaySurfaceCreateFlagsKHR)
+  VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagsKHR)
   {
     return "{}";
   }
 
 #ifdef VK_USE_PLATFORM_ANDROID_KHR
-  inline std::string to_string(AndroidSurfaceCreateFlagBitsKHR)
+  VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagBitsKHR)
   {
     return "(void)";
   }
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
 
 #ifdef VK_USE_PLATFORM_ANDROID_KHR
-  inline std::string to_string(AndroidSurfaceCreateFlagsKHR)
+  VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagsKHR)
   {
     return "{}";
   }
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
 
 #ifdef VK_USE_PLATFORM_MIR_KHR
-  inline std::string to_string(MirSurfaceCreateFlagBitsKHR)
+  VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagBitsKHR)
   {
     return "(void)";
   }
 #endif /*VK_USE_PLATFORM_MIR_KHR*/
 
 #ifdef VK_USE_PLATFORM_MIR_KHR
-  inline std::string to_string(MirSurfaceCreateFlagsKHR)
+  VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagsKHR)
   {
     return "{}";
   }
 #endif /*VK_USE_PLATFORM_MIR_KHR*/
 
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
-  inline std::string to_string(WaylandSurfaceCreateFlagBitsKHR)
+  VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagBitsKHR)
   {
     return "(void)";
   }
 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
 
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
-  inline std::string to_string(WaylandSurfaceCreateFlagsKHR)
+  VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagsKHR)
   {
     return "{}";
   }
 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
 
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  inline std::string to_string(Win32SurfaceCreateFlagBitsKHR)
+  VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagBitsKHR)
   {
     return "(void)";
   }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  inline std::string to_string(Win32SurfaceCreateFlagsKHR)
+  VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagsKHR)
   {
     return "{}";
   }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
 #ifdef VK_USE_PLATFORM_XLIB_KHR
-  inline std::string to_string(XlibSurfaceCreateFlagBitsKHR)
+  VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagBitsKHR)
   {
     return "(void)";
   }
 #endif /*VK_USE_PLATFORM_XLIB_KHR*/
 
 #ifdef VK_USE_PLATFORM_XLIB_KHR
-  inline std::string to_string(XlibSurfaceCreateFlagsKHR)
+  VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagsKHR)
   {
     return "{}";
   }
 #endif /*VK_USE_PLATFORM_XLIB_KHR*/
 
 #ifdef VK_USE_PLATFORM_XCB_KHR
-  inline std::string to_string(XcbSurfaceCreateFlagBitsKHR)
+  VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagBitsKHR)
   {
     return "(void)";
   }
 #endif /*VK_USE_PLATFORM_XCB_KHR*/
 
 #ifdef VK_USE_PLATFORM_XCB_KHR
-  inline std::string to_string(XcbSurfaceCreateFlagsKHR)
+  VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagsKHR)
   {
     return "{}";
   }
 #endif /*VK_USE_PLATFORM_XCB_KHR*/
 
-  inline std::string to_string(ImageLayout value)
+  VULKAN_HPP_INLINE std::string to_string(ImageLayout value)
   {
     switch (value)
     {
@@ -18520,7 +20446,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(AttachmentLoadOp value)
+  VULKAN_HPP_INLINE std::string to_string(AttachmentLoadOp value)
   {
     switch (value)
     {
@@ -18531,7 +20457,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(AttachmentStoreOp value)
+  VULKAN_HPP_INLINE std::string to_string(AttachmentStoreOp value)
   {
     switch (value)
     {
@@ -18541,7 +20467,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ImageType value)
+  VULKAN_HPP_INLINE std::string to_string(ImageType value)
   {
     switch (value)
     {
@@ -18552,7 +20478,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ImageTiling value)
+  VULKAN_HPP_INLINE std::string to_string(ImageTiling value)
   {
     switch (value)
     {
@@ -18562,7 +20488,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ImageViewType value)
+  VULKAN_HPP_INLINE std::string to_string(ImageViewType value)
   {
     switch (value)
     {
@@ -18577,7 +20503,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(CommandBufferLevel value)
+  VULKAN_HPP_INLINE std::string to_string(CommandBufferLevel value)
   {
     switch (value)
     {
@@ -18587,7 +20513,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ComponentSwizzle value)
+  VULKAN_HPP_INLINE std::string to_string(ComponentSwizzle value)
   {
     switch (value)
     {
@@ -18602,7 +20528,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(DescriptorType value)
+  VULKAN_HPP_INLINE std::string to_string(DescriptorType value)
   {
     switch (value)
     {
@@ -18621,7 +20547,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(QueryType value)
+  VULKAN_HPP_INLINE std::string to_string(QueryType value)
   {
     switch (value)
     {
@@ -18632,7 +20558,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(BorderColor value)
+  VULKAN_HPP_INLINE std::string to_string(BorderColor value)
   {
     switch (value)
     {
@@ -18646,7 +20572,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(PipelineBindPoint value)
+  VULKAN_HPP_INLINE std::string to_string(PipelineBindPoint value)
   {
     switch (value)
     {
@@ -18656,7 +20582,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(PipelineCacheHeaderVersion value)
+  VULKAN_HPP_INLINE std::string to_string(PipelineCacheHeaderVersion value)
   {
     switch (value)
     {
@@ -18665,7 +20591,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(PrimitiveTopology value)
+  VULKAN_HPP_INLINE std::string to_string(PrimitiveTopology value)
   {
     switch (value)
     {
@@ -18684,7 +20610,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(SharingMode value)
+  VULKAN_HPP_INLINE std::string to_string(SharingMode value)
   {
     switch (value)
     {
@@ -18694,7 +20620,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(IndexType value)
+  VULKAN_HPP_INLINE std::string to_string(IndexType value)
   {
     switch (value)
     {
@@ -18704,7 +20630,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(Filter value)
+  VULKAN_HPP_INLINE std::string to_string(Filter value)
   {
     switch (value)
     {
@@ -18715,7 +20641,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(SamplerMipmapMode value)
+  VULKAN_HPP_INLINE std::string to_string(SamplerMipmapMode value)
   {
     switch (value)
     {
@@ -18725,7 +20651,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(SamplerAddressMode value)
+  VULKAN_HPP_INLINE std::string to_string(SamplerAddressMode value)
   {
     switch (value)
     {
@@ -18738,7 +20664,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(CompareOp value)
+  VULKAN_HPP_INLINE std::string to_string(CompareOp value)
   {
     switch (value)
     {
@@ -18754,7 +20680,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(PolygonMode value)
+  VULKAN_HPP_INLINE std::string to_string(PolygonMode value)
   {
     switch (value)
     {
@@ -18765,7 +20691,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(CullModeFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(CullModeFlagBits value)
   {
     switch (value)
     {
@@ -18777,7 +20703,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(CullModeFlags value)
+  VULKAN_HPP_INLINE std::string to_string(CullModeFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -18788,7 +20714,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(FrontFace value)
+  VULKAN_HPP_INLINE std::string to_string(FrontFace value)
   {
     switch (value)
     {
@@ -18798,7 +20724,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(BlendFactor value)
+  VULKAN_HPP_INLINE std::string to_string(BlendFactor value)
   {
     switch (value)
     {
@@ -18825,7 +20751,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(BlendOp value)
+  VULKAN_HPP_INLINE std::string to_string(BlendOp value)
   {
     switch (value)
     {
@@ -18838,7 +20764,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(StencilOp value)
+  VULKAN_HPP_INLINE std::string to_string(StencilOp value)
   {
     switch (value)
     {
@@ -18854,7 +20780,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(LogicOp value)
+  VULKAN_HPP_INLINE std::string to_string(LogicOp value)
   {
     switch (value)
     {
@@ -18878,7 +20804,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(InternalAllocationType value)
+  VULKAN_HPP_INLINE std::string to_string(InternalAllocationType value)
   {
     switch (value)
     {
@@ -18887,7 +20813,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(SystemAllocationScope value)
+  VULKAN_HPP_INLINE std::string to_string(SystemAllocationScope value)
   {
     switch (value)
     {
@@ -18900,7 +20826,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(PhysicalDeviceType value)
+  VULKAN_HPP_INLINE std::string to_string(PhysicalDeviceType value)
   {
     switch (value)
     {
@@ -18913,7 +20839,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(VertexInputRate value)
+  VULKAN_HPP_INLINE std::string to_string(VertexInputRate value)
   {
     switch (value)
     {
@@ -18923,7 +20849,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(Format value)
+  VULKAN_HPP_INLINE std::string to_string(Format value)
   {
     switch (value)
     {
@@ -19124,7 +21050,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(StructureType value)
+  VULKAN_HPP_INLINE std::string to_string(StructureType value)
   {
     switch (value)
     {
@@ -19202,11 +21128,17 @@ namespace vk
     case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
     case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
     case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
+    case StructureType::eObjectTableCreateInfoNVX: return "ObjectTableCreateInfoNVX";
+    case StructureType::eIndirectCommandsLayoutCreateInfoNVX: return "IndirectCommandsLayoutCreateInfoNVX";
+    case StructureType::eCmdProcessCommandsInfoNVX: return "CmdProcessCommandsInfoNVX";
+    case StructureType::eCmdReserveSpaceForCommandsInfoNVX: return "CmdReserveSpaceForCommandsInfoNVX";
+    case StructureType::eDeviceGeneratedCommandsLimitsNVX: return "DeviceGeneratedCommandsLimitsNVX";
+    case StructureType::eDeviceGeneratedCommandsFeaturesNVX: return "DeviceGeneratedCommandsFeaturesNVX";
     default: return "invalid";
     }
   }
 
-  inline std::string to_string(SubpassContents value)
+  VULKAN_HPP_INLINE std::string to_string(SubpassContents value)
   {
     switch (value)
     {
@@ -19216,7 +21148,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(DynamicState value)
+  VULKAN_HPP_INLINE std::string to_string(DynamicState value)
   {
     switch (value)
     {
@@ -19233,7 +21165,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(QueueFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(QueueFlagBits value)
   {
     switch (value)
     {
@@ -19245,7 +21177,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(QueueFlags value)
+  VULKAN_HPP_INLINE std::string to_string(QueueFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19256,7 +21188,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(MemoryPropertyFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlagBits value)
   {
     switch (value)
     {
@@ -19269,7 +21201,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(MemoryPropertyFlags value)
+  VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19281,7 +21213,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(MemoryHeapFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlagBits value)
   {
     switch (value)
     {
@@ -19290,7 +21222,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(MemoryHeapFlags value)
+  VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19298,7 +21230,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(AccessFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(AccessFlagBits value)
   {
     switch (value)
     {
@@ -19319,11 +21251,13 @@ namespace vk
     case AccessFlagBits::eHostWrite: return "HostWrite";
     case AccessFlagBits::eMemoryRead: return "MemoryRead";
     case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
+    case AccessFlagBits::eCommandProcessReadNVX: return "CommandProcessReadNVX";
+    case AccessFlagBits::eCommandProcessWriteNVX: return "CommandProcessWriteNVX";
     default: return "invalid";
     }
   }
 
-  inline std::string to_string(AccessFlags value)
+  VULKAN_HPP_INLINE std::string to_string(AccessFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19344,10 +21278,12 @@ namespace vk
     if (value & AccessFlagBits::eHostWrite) result += "HostWrite | ";
     if (value & AccessFlagBits::eMemoryRead) result += "MemoryRead | ";
     if (value & AccessFlagBits::eMemoryWrite) result += "MemoryWrite | ";
+    if (value & AccessFlagBits::eCommandProcessReadNVX) result += "CommandProcessReadNVX | ";
+    if (value & AccessFlagBits::eCommandProcessWriteNVX) result += "CommandProcessWriteNVX | ";
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(BufferUsageFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(BufferUsageFlagBits value)
   {
     switch (value)
     {
@@ -19364,7 +21300,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(BufferUsageFlags value)
+  VULKAN_HPP_INLINE std::string to_string(BufferUsageFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19380,7 +21316,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(BufferCreateFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(BufferCreateFlagBits value)
   {
     switch (value)
     {
@@ -19391,7 +21327,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(BufferCreateFlags value)
+  VULKAN_HPP_INLINE std::string to_string(BufferCreateFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19401,7 +21337,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(ShaderStageFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(ShaderStageFlagBits value)
   {
     switch (value)
     {
@@ -19417,7 +21353,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ShaderStageFlags value)
+  VULKAN_HPP_INLINE std::string to_string(ShaderStageFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19432,7 +21368,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(ImageUsageFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(ImageUsageFlagBits value)
   {
     switch (value)
     {
@@ -19448,7 +21384,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ImageUsageFlags value)
+  VULKAN_HPP_INLINE std::string to_string(ImageUsageFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19463,7 +21399,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(ImageCreateFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(ImageCreateFlagBits value)
   {
     switch (value)
     {
@@ -19476,7 +21412,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ImageCreateFlags value)
+  VULKAN_HPP_INLINE std::string to_string(ImageCreateFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19488,7 +21424,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(PipelineCreateFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlagBits value)
   {
     switch (value)
     {
@@ -19499,7 +21435,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(PipelineCreateFlags value)
+  VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19509,7 +21445,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(ColorComponentFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(ColorComponentFlagBits value)
   {
     switch (value)
     {
@@ -19521,7 +21457,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ColorComponentFlags value)
+  VULKAN_HPP_INLINE std::string to_string(ColorComponentFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19532,7 +21468,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(FenceCreateFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(FenceCreateFlagBits value)
   {
     switch (value)
     {
@@ -19541,7 +21477,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(FenceCreateFlags value)
+  VULKAN_HPP_INLINE std::string to_string(FenceCreateFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19549,7 +21485,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(FormatFeatureFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlagBits value)
   {
     switch (value)
     {
@@ -19571,7 +21507,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(FormatFeatureFlags value)
+  VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19592,7 +21528,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(QueryControlFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(QueryControlFlagBits value)
   {
     switch (value)
     {
@@ -19601,7 +21537,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(QueryControlFlags value)
+  VULKAN_HPP_INLINE std::string to_string(QueryControlFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19609,7 +21545,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(QueryResultFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(QueryResultFlagBits value)
   {
     switch (value)
     {
@@ -19621,7 +21557,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(QueryResultFlags value)
+  VULKAN_HPP_INLINE std::string to_string(QueryResultFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19632,7 +21568,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(CommandBufferUsageFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlagBits value)
   {
     switch (value)
     {
@@ -19643,7 +21579,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(CommandBufferUsageFlags value)
+  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19653,7 +21589,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(QueryPipelineStatisticFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlagBits value)
   {
     switch (value)
     {
@@ -19672,7 +21608,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(QueryPipelineStatisticFlags value)
+  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19690,7 +21626,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(ImageAspectFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlagBits value)
   {
     switch (value)
     {
@@ -19702,7 +21638,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ImageAspectFlags value)
+  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19713,7 +21649,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(SparseImageFormatFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlagBits value)
   {
     switch (value)
     {
@@ -19724,7 +21660,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(SparseImageFormatFlags value)
+  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19734,7 +21670,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(SparseMemoryBindFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlagBits value)
   {
     switch (value)
     {
@@ -19743,7 +21679,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(SparseMemoryBindFlags value)
+  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19751,7 +21687,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(PipelineStageFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlagBits value)
   {
     switch (value)
     {
@@ -19772,11 +21708,12 @@ namespace vk
     case PipelineStageFlagBits::eHost: return "Host";
     case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
     case PipelineStageFlagBits::eAllCommands: return "AllCommands";
+    case PipelineStageFlagBits::eCommandProcessNVX: return "CommandProcessNVX";
     default: return "invalid";
     }
   }
 
-  inline std::string to_string(PipelineStageFlags value)
+  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19797,10 +21734,11 @@ namespace vk
     if (value & PipelineStageFlagBits::eHost) result += "Host | ";
     if (value & PipelineStageFlagBits::eAllGraphics) result += "AllGraphics | ";
     if (value & PipelineStageFlagBits::eAllCommands) result += "AllCommands | ";
+    if (value & PipelineStageFlagBits::eCommandProcessNVX) result += "CommandProcessNVX | ";
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(CommandPoolCreateFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlagBits value)
   {
     switch (value)
     {
@@ -19810,7 +21748,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(CommandPoolCreateFlags value)
+  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19819,7 +21757,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(CommandPoolResetFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlagBits value)
   {
     switch (value)
     {
@@ -19828,7 +21766,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(CommandPoolResetFlags value)
+  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19836,7 +21774,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(CommandBufferResetFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlagBits value)
   {
     switch (value)
     {
@@ -19845,7 +21783,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(CommandBufferResetFlags value)
+  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19853,7 +21791,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(SampleCountFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(SampleCountFlagBits value)
   {
     switch (value)
     {
@@ -19868,7 +21806,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(SampleCountFlags value)
+  VULKAN_HPP_INLINE std::string to_string(SampleCountFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19882,7 +21820,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(AttachmentDescriptionFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlagBits value)
   {
     switch (value)
     {
@@ -19891,7 +21829,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(AttachmentDescriptionFlags value)
+  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19899,7 +21837,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(StencilFaceFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlagBits value)
   {
     switch (value)
     {
@@ -19910,7 +21848,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(StencilFaceFlags value)
+  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19920,7 +21858,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(DescriptorPoolCreateFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlagBits value)
   {
     switch (value)
     {
@@ -19929,7 +21867,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(DescriptorPoolCreateFlags value)
+  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19937,7 +21875,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(DependencyFlagBits value)
+  VULKAN_HPP_INLINE std::string to_string(DependencyFlagBits value)
   {
     switch (value)
     {
@@ -19946,7 +21884,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(DependencyFlags value)
+  VULKAN_HPP_INLINE std::string to_string(DependencyFlags value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19954,7 +21892,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(PresentModeKHR value)
+  VULKAN_HPP_INLINE std::string to_string(PresentModeKHR value)
   {
     switch (value)
     {
@@ -19966,7 +21904,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ColorSpaceKHR value)
+  VULKAN_HPP_INLINE std::string to_string(ColorSpaceKHR value)
   {
     switch (value)
     {
@@ -19975,7 +21913,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(DisplayPlaneAlphaFlagBitsKHR value)
+  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagBitsKHR value)
   {
     switch (value)
     {
@@ -19987,7 +21925,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(DisplayPlaneAlphaFlagsKHR value)
+  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagsKHR value)
   {
     if (!value) return "{}";
     std::string result;
@@ -19998,7 +21936,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(CompositeAlphaFlagBitsKHR value)
+  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagBitsKHR value)
   {
     switch (value)
     {
@@ -20010,7 +21948,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(CompositeAlphaFlagsKHR value)
+  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagsKHR value)
   {
     if (!value) return "{}";
     std::string result;
@@ -20021,7 +21959,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(SurfaceTransformFlagBitsKHR value)
+  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagBitsKHR value)
   {
     switch (value)
     {
@@ -20038,7 +21976,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(SurfaceTransformFlagsKHR value)
+  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagsKHR value)
   {
     if (!value) return "{}";
     std::string result;
@@ -20054,7 +21992,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(DebugReportFlagBitsEXT value)
+  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagBitsEXT value)
   {
     switch (value)
     {
@@ -20067,7 +22005,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(DebugReportFlagsEXT value)
+  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagsEXT value)
   {
     if (!value) return "{}";
     std::string result;
@@ -20079,7 +22017,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(DebugReportObjectTypeEXT value)
+  VULKAN_HPP_INLINE std::string to_string(DebugReportObjectTypeEXT value)
   {
     switch (value)
     {
@@ -20112,11 +22050,15 @@ namespace vk
     case DebugReportObjectTypeEXT::eSurfaceKhr: return "SurfaceKhr";
     case DebugReportObjectTypeEXT::eSwapchainKhr: return "SwapchainKhr";
     case DebugReportObjectTypeEXT::eDebugReport: return "DebugReport";
+    case DebugReportObjectTypeEXT::eDisplayKhr: return "DisplayKhr";
+    case DebugReportObjectTypeEXT::eDisplayModeKhr: return "DisplayModeKhr";
+    case DebugReportObjectTypeEXT::eObjectTableNvx: return "ObjectTableNvx";
+    case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNvx: return "IndirectCommandsLayoutNvx";
     default: return "invalid";
     }
   }
 
-  inline std::string to_string(DebugReportErrorEXT value)
+  VULKAN_HPP_INLINE std::string to_string(DebugReportErrorEXT value)
   {
     switch (value)
     {
@@ -20126,7 +22068,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(RasterizationOrderAMD value)
+  VULKAN_HPP_INLINE std::string to_string(RasterizationOrderAMD value)
   {
     switch (value)
     {
@@ -20136,7 +22078,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value)
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value)
   {
     switch (value)
     {
@@ -20148,7 +22090,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ExternalMemoryHandleTypeFlagsNV value)
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsNV value)
   {
     if (!value) return "{}";
     std::string result;
@@ -20159,7 +22101,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(ExternalMemoryFeatureFlagBitsNV value)
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsNV value)
   {
     switch (value)
     {
@@ -20170,7 +22112,7 @@ namespace vk
     }
   }
 
-  inline std::string to_string(ExternalMemoryFeatureFlagsNV value)
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsNV value)
   {
     if (!value) return "{}";
     std::string result;
@@ -20180,7 +22122,7 @@ namespace vk
     return "{" + result.substr(0, result.size() - 3) + "}";
   }
 
-  inline std::string to_string(ValidationCheckEXT value)
+  VULKAN_HPP_INLINE std::string to_string(ValidationCheckEXT value)
   {
     switch (value)
     {
@@ -20189,6 +22131,77 @@ namespace vk
     }
   }
 
+  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagBitsNVX value)
+  {
+    switch (value)
+    {
+    case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences: return "UnorderedSequences";
+    case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences: return "SparseSequences";
+    case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions: return "EmptyExecutions";
+    case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences: return "IndexedSequences";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagsNVX value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) result += "UnorderedSequences | ";
+    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) result += "SparseSequences | ";
+    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) result += "EmptyExecutions | ";
+    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) result += "IndexedSequences | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagBitsNVX value)
+  {
+    switch (value)
+    {
+    case ObjectEntryUsageFlagBitsNVX::eGraphics: return "Graphics";
+    case ObjectEntryUsageFlagBitsNVX::eCompute: return "Compute";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagsNVX value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ObjectEntryUsageFlagBitsNVX::eGraphics) result += "Graphics | ";
+    if (value & ObjectEntryUsageFlagBitsNVX::eCompute) result += "Compute | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsTokenTypeNVX value)
+  {
+    switch (value)
+    {
+    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline: return "VkIndirectCommandsTokenPipeline";
+    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDescriptorSet: return "VkIndirectCommandsTokenDescriptorSet";
+    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenIndexBuffer: return "VkIndirectCommandsTokenIndexBuffer";
+    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenVertexBuffer: return "VkIndirectCommandsTokenVertexBuffer";
+    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPushConstant: return "VkIndirectCommandsTokenPushConstant";
+    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDrawIndexed: return "VkIndirectCommandsTokenDrawIndexed";
+    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDraw: return "VkIndirectCommandsTokenDraw";
+    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDispatch: return "VkIndirectCommandsTokenDispatch";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ObjectEntryTypeNVX value)
+  {
+    switch (value)
+    {
+    case ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet: return "VkObjectEntryDescriptorSet";
+    case ObjectEntryTypeNVX::eVkObjectEntryPipeline: return "VkObjectEntryPipeline";
+    case ObjectEntryTypeNVX::eVkObjectEntryIndexBuffer: return "VkObjectEntryIndexBuffer";
+    case ObjectEntryTypeNVX::eVkObjectEntryVertexBuffer: return "VkObjectEntryVertexBuffer";
+    case ObjectEntryTypeNVX::eVkObjectEntryPushConstant: return "VkObjectEntryPushConstant";
+    default: return "invalid";
+    }
+  }
+
 } // namespace vk
 
 #endif
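
As a minimal sketch (not part of the patch) of what the additions above provide: the to_string helpers now use the VULKAN_HPP_INLINE macro instead of a bare inline, and gain overloads for the VK_NVX_device_generated_commands enums and flag masks. Assuming only the updated <vulkan/vulkan.hpp>, the new overloads behave the same way as the existing ones; the flag-mask forms join the set bits, the enum forms return the enumerant name:

    // Minimal sketch: exercises the NVX to_string overloads introduced above.
    // Assumes only the updated <vulkan/vulkan.hpp> and a C++11 compiler.
    #include <iostream>
    #include <vulkan/vulkan.hpp>

    int main()
    {
        // Single-enumerant overloads return the short enumerant name.
        std::cout << vk::to_string(vk::ObjectEntryTypeNVX::eVkObjectEntryPipeline) << "\n";
        std::cout << vk::to_string(vk::PipelineStageFlagBits::eCommandProcessNVX) << "\n";

        // Flag-mask overloads print the set bits,
        // e.g. "{UnorderedSequences | IndexedSequences}".
        vk::IndirectCommandsLayoutUsageFlagsNVX usage =
            vk::IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences;
        usage |= vk::IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences;
        std::cout << vk::to_string(usage) << "\n";
        return 0;
    }
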
index bc770f6..fdbbca4 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_LUNARG_core_validation",
         "type": "GLOBAL",
         "library_path": "./libVkLayer_core_validation.so",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "LunarG Validation Layer",
         "instance_extensions": [
index a2bb1ca..3e6ffef 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_LUNARG_image",
         "type": "GLOBAL",
         "library_path": "./libVkLayer_image.so",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "LunarG Validation Layer",
         "instance_extensions": [
index 5d5bd27..f713a7a 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_LUNARG_object_tracker",
         "type": "GLOBAL",
         "library_path": "./libVkLayer_object_tracker.so",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "LunarG Validation Layer",
         "instance_extensions": [
index a75aa49..ca10a07 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_LUNARG_parameter_validation",
         "type": "GLOBAL",
         "library_path": "./libVkLayer_parameter_validation.so",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "LunarG Validation Layer",
         "instance_extensions": [
index 8418318..b5b5c94 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_LUNARG_swapchain",
         "type": "GLOBAL",
         "library_path": "./libVkLayer_swapchain.so",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "LunarG Validation Layer",
         "instance_extensions": [
index 457c2f8..ab27394 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_GOOGLE_threading",
         "type": "GLOBAL",
         "library_path": "./libVkLayer_threading.so",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "Google Validation Layer",
         "instance_extensions": [
index 8c804e6..6245aa6 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_GOOGLE_unique_objects",
         "type": "GLOBAL",
         "library_path": "./libVkLayer_unique_objects.so",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "Google Validation Layer"
     }
index 1d0924d..ac5b17b 100644 (file)
@@ -245,9 +245,12 @@ struct layer_data {
     counter<VkSemaphore> c_VkSemaphore;
     counter<VkShaderModule> c_VkShaderModule;
     counter<VkDebugReportCallbackEXT> c_VkDebugReportCallbackEXT;
+    counter<VkObjectTableNVX> c_VkObjectTableNVX;
+    counter<VkIndirectCommandsLayoutNVX> c_VkIndirectCommandsLayoutNVX;
 #else  // DISTINCT_NONDISPATCHABLE_HANDLES
     counter<uint64_t> c_uint64_t;
 #endif // DISTINCT_NONDISPATCHABLE_HANDLES
+
     layer_data()
         : report_data(nullptr), num_tmp_callbacks(0), tmp_dbg_create_infos(nullptr), tmp_callbacks(nullptr),
           c_VkCommandBuffer("VkCommandBuffer", VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT),
@@ -274,7 +277,9 @@ struct layer_data {
           c_VkSampler("VkSampler", VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT),
           c_VkSemaphore("VkSemaphore", VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT),
           c_VkShaderModule("VkShaderModule", VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT),
-          c_VkDebugReportCallbackEXT("VkDebugReportCallbackEXT", VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT)
+          c_VkDebugReportCallbackEXT("VkDebugReportCallbackEXT", VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT),
+          c_VkObjectTableNVX("VkObjectTableNVX", VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT),
+          c_VkIndirectCommandsLayoutNVX("VkIndirectCommandsLayoutNVX", VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT)
 #else  // DISTINCT_NONDISPATCHABLE_HANDLES
           c_uint64_t("NON_DISPATCHABLE_HANDLE", VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT)
 #endif // DISTINCT_NONDISPATCHABLE_HANDLES
@@ -316,10 +321,13 @@ WRAPPER(VkSampler)
 WRAPPER(VkSemaphore)
 WRAPPER(VkShaderModule)
 WRAPPER(VkDebugReportCallbackEXT)
+WRAPPER(VkObjectTableNVX)
+WRAPPER(VkIndirectCommandsLayoutNVX)
 #else  // DISTINCT_NONDISPATCHABLE_HANDLES
 WRAPPER(uint64_t)
 #endif // DISTINCT_NONDISPATCHABLE_HANDLES
 
+
 static std::unordered_map<void *, layer_data *> layer_data_map;
 static std::mutex command_pool_lock;
 static std::unordered_map<VkCommandBuffer, VkCommandPool> command_pool_map;
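
The two new counters and WRAPPER instantiations above extend the threading layer's per-handle external-synchronization tracking to the NVX object-table and indirect-commands-layout handles. A rough sketch of what they enable is below; startWriteObject/finishWriteObject come from the WRAPPER expansions, while checkDestroyObjectTableNVX and the call-down step are hypothetical stand-ins for the generated intercepts:

    // Rough sketch only (not part of the patch): how the new NVX counters are used.
    // startWriteObject/finishWriteObject are produced by WRAPPER(VkObjectTableNVX);
    // the helper name and the dispatch to the next layer are illustrative.
    static void checkDestroyObjectTableNVX(layer_data *my_data, VkObjectTableNVX objectTable)
    {
        startWriteObject(my_data, objectTable);   // reports if another thread holds the handle
        // ... call down to the next layer's vkDestroyObjectTableNVX here ...
        finishWriteObject(my_data, objectTable);
    }
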
index 2c88652..eb73b43 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_LUNARG_core_validation",
         "type": "GLOBAL",
         "library_path": ".\\VkLayer_core_validation.dll",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "LunarG Validation Layer",
         "instance_extensions": [
index 5bfdd1f..9e9a4d2 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_LUNARG_image",
         "type": "GLOBAL",
         "library_path": ".\\VkLayer_image.dll",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "LunarG Validation Layer",
         "instance_extensions": [
index 12cb902..b340fa4 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_LUNARG_object_tracker",
         "type": "GLOBAL",
         "library_path": ".\\VkLayer_object_tracker.dll",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "LunarG Validation Layer",
         "instance_extensions": [
index 74291d4..437bd91 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_LUNARG_parameter_validation",
         "type": "GLOBAL",
         "library_path": ".\\VkLayer_parameter_validation.dll",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "LunarG Validation Layer",
         "instance_extensions": [
index f76d1ce..79f4220 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_LUNARG_swapchain",
         "type": "GLOBAL",
         "library_path": ".\\VkLayer_swapchain.dll",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "LunarG Validation Layer",
         "instance_extensions": [
index 2bba51c..8d6a392 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_GOOGLE_threading",
         "type": "GLOBAL",
         "library_path": ".\\VkLayer_threading.dll",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "Google Validation Layer",
         "instance_extensions": [
index a243ed3..94b2c49 100644 (file)
@@ -4,7 +4,7 @@
         "name": "VK_LAYER_GOOGLE_unique_objects",
         "type": "GLOBAL",
         "library_path": ".\\VkLayer_unique_objects.dll",
-        "api_version": "1.0.34",
+        "api_version": "1.0.37",
         "implementation_version": "1",
         "description": "Google Validation Layer"
     }
index 095098a..373e80c 100644 (file)
@@ -111,7 +111,7 @@ else()
     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wpointer-arith")
 
     add_library(${API_LOWERCASE} SHARED ${LOADER_SRCS})
-    set_target_properties(${API_LOWERCASE} PROPERTIES SOVERSION "1" VERSION "1.0.34")
+    set_target_properties(${API_LOWERCASE} PROPERTIES SOVERSION "1" VERSION "1.0.37")
     target_link_libraries(${API_LOWERCASE} -ldl -lpthread -lm)
     install(TARGETS ${API_LOWERCASE} LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR})
 endif()
index 043121c..3c5cc0a 100755 (executable)
@@ -318,7 +318,8 @@ class OutputGenerator:
         # Open specified output file. Not done in constructor since a
         # Generator can be used without writing to a file.
         if (self.genOpts.filename != None):
-            self.outFile = open(self.genOpts.directory + '/' + self.genOpts.filename, 'w')
+            filename = self.genOpts.directory + '/' + self.genOpts.filename
+            self.outFile = open(filename, 'w', encoding='utf-8')
         else:
             self.outFile = sys.stdout
     def endFile(self):
index 63b8bab..07a7bcd 100644 (file)
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/python3
 #
 # Copyright (c) 2013-2016 The Khronos Group Inc.
 #
@@ -50,7 +50,7 @@ def makeREstring(list):
 # extensions - list of extension names to include.
 # protect - True if re-inclusion protection should be added to headers
 # directory - path to directory in which to generate the target(s)
-def makeGenOpts(extensions = [], protect = True, directory = '.'):
+def makeGenOpts(extensions = [], removeExtensions = [], protect = True, directory = '.'):
     global genOpts
     genOpts = {}
 
@@ -60,7 +60,7 @@ def makeGenOpts(extensions = [], protect = True, directory = '.'):
     noVersions      = noExtensions = None
 
     addExtensions     = makeREstring(extensions)
-    removeExtensions  = makeREstring([])
+    removeExtensions  = makeREstring(removeExtensions)
 
     # Copyright text prefixing all headers (list of strings).
     prefixStrings = [
@@ -199,6 +199,7 @@ def genTarget(args):
 
     # Create generator options with specified parameters
     makeGenOpts(extensions = args.extension,
+                removeExtensions = args.removeExtension,
                 protect = args.protect,
                 directory = args.directory)
 
@@ -228,6 +229,9 @@ if __name__ == '__main__':
     parser.add_argument('-extension', action='append',
                         default=[],
                         help='Specify an extension or extensions to add to targets')
+    parser.add_argument('-removeExtension', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to remove from targets')
     parser.add_argument('-debug', action='store_true',
                         help='Enable debugging')
     parser.add_argument('-dump', action='store_true',
@@ -276,16 +280,16 @@ if __name__ == '__main__':
 
     if (args.dump):
         write('* Dumping registry to regdump.txt', file=sys.stderr)
-        reg.dumpReg(filehandle = open('regdump.txt','w'))
+        reg.dumpReg(filehandle = open('regdump.txt','w', encoding='utf-8'))
 
     # create error/warning & diagnostic files
     if (args.errfile):
-        errWarn = open(args.errfile, 'w')
+        errWarn = open(args.errfile, 'w', encoding='utf-8')
     else:
         errWarn = sys.stderr
 
     if (args.diagfile):
-        diag = open(args.diagfile, 'w')
+        diag = open(args.diagfile, 'w', encoding='utf-8')
     else:
         diag = None
 
index ed30fcd..869e127 100644 (file)
@@ -104,7 +104,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
         <type category="define">// Vulkan 1.0 version number
 #define <name>VK_API_VERSION_1_0</name> <type>VK_MAKE_VERSION</type>(1, 0, 0)</type>    <!-- The patch version here should never be set to anything other than 0 -->
         <type category="define">// Version of this file
-#define <name>VK_HEADER_VERSION</name> 34</type>
+#define <name>VK_HEADER_VERSION</name> 37</type>
 
         <type category="define">
 #define <name>VK_DEFINE_HANDLE</name>(object) typedef struct object##_T* object;</type>
@@ -195,6 +195,10 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
         <type requires="VkDescriptorPoolCreateFlagBits"   category="bitmask">typedef <type>VkFlags</type> <name>VkDescriptorPoolCreateFlags</name>;</type>        <!-- Descriptor pool creation flags -->
         <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkDescriptorPoolResetFlags</name>;</type>         <!-- Descriptor pool reset flags -->
         <type requires="VkDependencyFlagBits"             category="bitmask">typedef <type>VkFlags</type> <name>VkDependencyFlags</name>;</type>                  <!-- Pipeline barrier and subpass dependency flags -->
+
+        <type requires="VkIndirectCommandsLayoutUsageFlagBitsNVX"  category="bitmask">typedef <type>VkFlags</type> <name>VkIndirectCommandsLayoutUsageFlagsNVX</name>;</type>  <!-- Device generated commands usage flags -->
+        <type requires="VkObjectEntryUsageFlagBitsNVX"             category="bitmask">typedef <type>VkFlags</type> <name>VkObjectEntryUsageFlagsNVX</name>;</type>             <!-- Object usage flags -->
+
             <!-- WSI extensions -->
         <type requires="VkCompositeAlphaFlagBitsKHR"      category="bitmask">typedef <type>VkFlags</type> <name>VkCompositeAlphaFlagsKHR</name>;</type>
         <type requires="VkDisplayPlaneAlphaFlagBitsKHR"   category="bitmask">typedef <type>VkFlags</type> <name>VkDisplayPlaneAlphaFlagsKHR</name>;</type>
@@ -239,6 +243,8 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
         <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkFramebuffer</name>)</type>
         <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkRenderPass</name>)</type>
         <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkPipelineCache</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkObjectTableNVX</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkIndirectCommandsLayoutNVX</name>)</type>
 
         <!-- WSI extensions -->
         <type category="handle"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDisplayKHR</name>)</type>
@@ -334,6 +340,10 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
         <type name="VkAttachmentDescriptionFlagBits" category="enum"/>
         <type name="VkDescriptorPoolCreateFlagBits" category="enum"/>
         <type name="VkDependencyFlagBits" category="enum"/>
+        <type name="VkIndirectCommandsLayoutUsageFlagBitsNVX" category="enum"/>
+        <type name="VkIndirectCommandsTokenTypeNVX" category="enum"/>
+        <type name="VkObjectEntryUsageFlagBitsNVX" category="enum"/>
+        <type name="VkObjectEntryTypeNVX" category="enum"/>
         <!-- WSI extensions -->
         <type name="VkColorSpaceKHR" category="enum"/>
         <type name="VkCompositeAlphaFlagBitsKHR" category="enum"/>
@@ -1444,21 +1454,21 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <member values="VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
             <member>const <type>void</type>*                      <name>pNext</name></member>                    <!-- Pointer to next structure -->
             <member optional="true"><type>VkAndroidSurfaceCreateFlagsKHR</type>   <name>flags</name></member>    <!-- Reserved -->
-            <member><type>ANativeWindow</type>*                   <name>window</name></member>
+            <member noautovalidity="true"><type>ANativeWindow</type>*                   <name>window</name></member>
         </type>
         <type category="struct" name="VkMirSurfaceCreateInfoKHR">
             <member values="VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
             <member>const <type>void</type>*                      <name>pNext</name></member>                    <!-- Pointer to next structure -->
             <member optional="true"><type>VkMirSurfaceCreateFlagsKHR</type>   <name>flags</name></member>        <!-- Reserved -->
-            <member><type>MirConnection</type>*                   <name>connection</name></member>
-            <member><type>MirSurface</type>*                      <name>mirSurface</name></member>
+            <member noautovalidity="true"><type>MirConnection</type>*                   <name>connection</name></member>
+            <member noautovalidity="true"><type>MirSurface</type>*                      <name>mirSurface</name></member>
         </type>
         <type category="struct" name="VkWaylandSurfaceCreateInfoKHR">
             <member values="VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
             <member>const <type>void</type>*                      <name>pNext</name></member>                    <!-- Pointer to next structure -->
             <member optional="true"><type>VkWaylandSurfaceCreateFlagsKHR</type>   <name>flags</name></member>    <!-- Reserved -->
-            <member>struct <type>wl_display</type>*               <name>display</name></member>
-            <member>struct <type>wl_surface</type>*               <name>surface</name></member>
+            <member noautovalidity="true">struct <type>wl_display</type>*               <name>display</name></member>
+            <member noautovalidity="true">struct <type>wl_surface</type>*               <name>surface</name></member>
         </type>
         <type category="struct" name="VkWin32SurfaceCreateInfoKHR">
             <member values="VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
@@ -1471,14 +1481,14 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <member values="VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
             <member>const <type>void</type>*                      <name>pNext</name></member>                    <!-- Pointer to next structure -->
             <member optional="true"><type>VkXlibSurfaceCreateFlagsKHR</type>   <name>flags</name></member>       <!-- Reserved -->
-            <member><type>Display</type>*                         <name>dpy</name></member>
+            <member noautovalidity="true"><type>Display</type>*                         <name>dpy</name></member>
             <member><type>Window</type>                           <name>window</name></member>
         </type>
         <type category="struct" name="VkXcbSurfaceCreateInfoKHR">
             <member values="VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
             <member>const <type>void</type>*                      <name>pNext</name></member>                    <!-- Pointer to next structure -->
             <member optional="true"><type>VkXcbSurfaceCreateFlagsKHR</type>   <name>flags</name></member>        <!-- Reserved -->
-            <member><type>xcb_connection_t</type>*                <name>connection</name></member>
+            <member noautovalidity="true"><type>xcb_connection_t</type>*                <name>connection</name></member>
             <member><type>xcb_window_t</type>                     <name>window</name></member>
         </type>
         <type category="struct" name="VkSurfaceFormatKHR" returnedonly="true">
@@ -1571,33 +1581,33 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <member optional="true"><type>VkImage</type>          <name>image</name></member>                    <!-- Image that this allocation will be bound to -->
             <member optional="true"><type>VkBuffer</type>         <name>buffer</name></member>                   <!-- Buffer that this allocation will be bound to -->
         </type>
-        <type category="struct" name="VkExternalImageFormatPropertiesNV">
+        <type category="struct" name="VkExternalImageFormatPropertiesNV" returnedonly="true">
             <member><type>VkImageFormatProperties</type>          <name>imageFormatProperties</name></member>
-            <member><type>VkExternalMemoryFeatureFlagsNV</type>   <name>externalMemoryFeatures</name></member>
-            <member><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>exportFromImportedHandleTypes</name></member>
-            <member><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>compatibleHandleTypes</name></member>
+            <member optional="true"><type>VkExternalMemoryFeatureFlagsNV</type>   <name>externalMemoryFeatures</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>exportFromImportedHandleTypes</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>compatibleHandleTypes</name></member>
         </type>
         <type category="struct" name="VkExternalMemoryImageCreateInfoNV">
             <member values="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
             <member>const <type>void</type>*                      <name>pNext</name></member>
-            <member><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>handleTypes</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>handleTypes</name></member>
         </type>
         <type category="struct" name="VkExportMemoryAllocateInfoNV">
             <member values="VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
             <member>const <type>void</type>*                      <name>pNext</name></member>
-            <member><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>handleTypes</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>handleTypes</name></member>
         </type>
         <type category="struct" name="VkImportMemoryWin32HandleInfoNV">
             <member values="VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
             <member>const <type>void</type>*                      <name>pNext</name></member>
             <member optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>handleType</name></member>
-            <member><type>HANDLE</type>                           <name>handle</name></member>
+            <member optional="true"><type>HANDLE</type>                           <name>handle</name></member>
         </type>
         <type category="struct" name="VkExportMemoryWin32HandleInfoNV">
             <member values="VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
             <member>const <type>void</type>*                      <name>pNext</name></member>
-            <member>const <type>SECURITY_ATTRIBUTES</type>*       <name>pAttributes</name></member>
-            <member><type>DWORD</type>                            <name>dwAccess</name></member>
+            <member optional="true">const <type>SECURITY_ATTRIBUTES</type>*       <name>pAttributes</name></member>
+            <member optional="true"><type>DWORD</type>                            <name>dwAccess</name></member>
         </type>
         <type category="struct" name="VkWin32KeyedMutexAcquireReleaseInfoNV">
             <member values="VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
@@ -1610,6 +1620,106 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <member len="releaseCount">const <type>VkDeviceMemory</type>*            <name>pReleaseSyncs</name></member>
             <member len="releaseCount">const <type>uint64_t</type>*                  <name>pReleaseKeys</name></member>
         </type>
+
+        <type category="struct" name="VkDeviceGeneratedCommandsFeaturesNVX">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>computeBindingPointSupport</name></member>
+        </type>
+        <type category="struct" name="VkDeviceGeneratedCommandsLimitsNVX">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>maxIndirectCommandsLayoutTokenCount</name></member>
+            <member><type>uint32_t</type>                         <name>maxObjectEntryCounts</name></member>
+            <member><type>uint32_t</type>                         <name>minSequenceCountBufferOffsetAlignment</name></member>
+            <member><type>uint32_t</type>                         <name>minSequenceIndexBufferOffsetAlignment</name></member>
+            <member><type>uint32_t</type>                         <name>minCommandsTokenBufferOffsetAlignment</name></member>
+        </type>
+        <type category="struct" name="VkIndirectCommandsTokenNVX">
+            <member><type>VkIndirectCommandsTokenTypeNVX</type>      <name>tokenType</name></member>
+            <member><type>VkBuffer</type>                         <name>buffer</name></member>  <!-- buffer containing tableEntries and additional data for indirectCommands -->
+            <member><type>VkDeviceSize</type>                     <name>offset</name></member>  <!-- offset from the base address of the buffer -->
+        </type>
+        <type category="struct" name="VkIndirectCommandsLayoutTokenNVX">
+            <member><type>VkIndirectCommandsTokenTypeNVX</type>      <name>tokenType</name></member>
+            <member><type>uint32_t</type>                         <name>bindingUnit</name></member>  <!-- Binding unit for vertex attribute / descriptor set, offset for pushconstants -->
+            <member><type>uint32_t</type>                         <name>dynamicCount</name></member> <!-- Number of variable dynamic values for descriptor set / push constants -->
+            <member><type>uint32_t</type>                         <name>divisor</name></member>      <!-- Rate at which the array is advanced per element (must be power of 2, minimum 1) -->
+        </type>
+        <type category="struct" name="VkIndirectCommandsLayoutCreateInfoNVX">
+            <member values="VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkPipelineBindPoint</type>                      <name>pipelineBindPoint</name></member>
+            <member><type>VkIndirectCommandsLayoutUsageFlagsNVX</type>    <name>flags</name></member>
+            <member><type>uint32_t</type>                                 <name>tokenCount</name></member>
+            <member len="tokenCount">const <type>VkIndirectCommandsLayoutTokenNVX</type>*  <name>pTokens</name></member>
+        </type>
+        <type category="struct" name="VkCmdProcessCommandsInfoNVX">
+            <member values="VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member externsync="true"><type>VkObjectTableNVX</type>                                         <name>objectTable</name></member>
+            <member><type>VkIndirectCommandsLayoutNVX</type>                              <name>indirectCommandsLayout</name></member>
+            <member><type>uint32_t</type>                                                 <name>indirectCommandsTokenCount</name></member>
+            <member len="indirectCommandsTokenCount">const <type>VkIndirectCommandsTokenNVX</type>*       <name>pIndirectCommandsTokens</name></member>
+            <member><type>uint32_t</type>                                                 <name>maxSequencesCount</name></member>
+            <member optional="true" externsync="true"><type>VkCommandBuffer</type>                          <name>targetCommandBuffer</name></member>
+            <member optional="true"><type>VkBuffer</type>                                 <name>sequencesCountBuffer</name></member>
+            <member optional="true"><type>VkDeviceSize</type>                             <name>sequencesCountOffset</name></member>
+            <member optional="true"><type>VkBuffer</type>                                 <name>sequencesIndexBuffer</name></member>
+            <member optional="true"><type>VkDeviceSize</type>                             <name>sequencesIndexOffset</name></member>
+        </type>
+        <type category="struct" name="VkCmdReserveSpaceForCommandsInfoNVX">
+            <member values="VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member externsync="true"><type>VkObjectTableNVX</type>                                         <name>objectTable</name></member>
+            <member><type>VkIndirectCommandsLayoutNVX</type>                              <name>indirectCommandsLayout</name></member>
+            <member><type>uint32_t</type>                                                 <name>maxSequencesCount</name></member>
+        </type>
+        <type category="struct" name="VkObjectTableCreateInfoNVX">
+            <member values="VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                                          <name>objectCount</name></member>
+            <member len="objectCount">const <type>VkObjectEntryTypeNVX</type>*       <name>pObjectEntryTypes</name></member>
+            <member len="objectCount">const <type>uint32_t</type>*                   <name>pObjectEntryCounts</name></member>
+            <member len="objectCount">const <type>VkObjectEntryUsageFlagsNVX</type>* <name>pObjectEntryUsageFlags</name></member>
+
+            <member><type>uint32_t</type> <name>maxUniformBuffersPerDescriptor</name></member>
+            <member><type>uint32_t</type> <name>maxStorageBuffersPerDescriptor</name></member>
+            <member><type>uint32_t</type> <name>maxStorageImagesPerDescriptor</name></member>
+            <member><type>uint32_t</type> <name>maxSampledImagesPerDescriptor</name></member>
+            <member><type>uint32_t</type> <name>maxPipelineLayouts</name></member>
+        </type>
+        <type category="struct" name="VkObjectTableEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+        </type>
+        <type category="struct" name="VkObjectTablePipelineEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+            <member><type>VkPipeline</type>                   <name>pipeline</name></member>
+        </type>
+        <type category="struct" name="VkObjectTableDescriptorSetEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+            <member><type>VkPipelineLayout</type>             <name>pipelineLayout</name></member>
+            <member><type>VkDescriptorSet</type>              <name>descriptorSet</name></member>
+        </type>
+        <type category="struct" name="VkObjectTableVertexBufferEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+            <member><type>VkBuffer</type>                     <name>buffer</name></member>
+        </type>
+        <type category="struct" name="VkObjectTableIndexBufferEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+            <member><type>VkBuffer</type>                     <name>buffer</name></member>
+        </type>
+        <type category="struct" name="VkObjectTablePushConstantEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+            <member><type>VkPipelineLayout</type>             <name>pipelineLayout</name></member>
+            <member><type>VkShaderStageFlags</type>           <name>stageFlags</name></member>
+        </type>
     </types>
 
     <!-- SECTION: Vulkan enumerant (token) definitions. -->
@@ -2410,6 +2520,10 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
         <enum value="26" name="VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT"/>
         <enum value="27" name="VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT"/>
         <enum value="28" name="VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT"/>
+        <enum value="29" name="VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT"/>
+        <enum value="30" name="VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT"/>
+        <enum value="31" name="VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT"/>
+        <enum value="32" name="VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT"/>
     </enums>
     <enums name="VkDebugReportErrorEXT" type="enum">
         <enum value="0" name="VK_DEBUG_REPORT_ERROR_NONE_EXT"/>         <!-- Used for INFO & other non-error messages -->
@@ -2434,7 +2548,33 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
         <enum value="0" name="VK_VALIDATION_CHECK_ALL_EXT"/>
         <!-- Placeholder for validation enums to be defined for VK_EXT_Validation_flags extension -->
     </enums>
-
+    <enums name="VkIndirectCommandsLayoutUsageFlagBitsNVX" type="bitmask">
+        <enum bitpos="0" name="VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX"/> <!-- sequences can be processed in implementation-dependent order -->
+        <enum bitpos="1" name="VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX"/>    <!-- likely generated with a high difference in actual sequencesCount and maxSequencesCount -->
+        <enum bitpos="2" name="VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX"/>    <!-- likely to contain draw/dispatch calls that are zero-sized -->
+        <enum bitpos="3" name="VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX"/>   <!-- custom sequence index permutation (32-bit) is provided -->
+    </enums>
+    <enums name="VkObjectEntryUsageFlagBitsNVX" type="bitmask">
+        <enum bitpos="0" name="VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX"/>
+        <enum bitpos="1" name="VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX"/>
+    </enums>
+    <enums name="VkIndirectCommandsTokenTypeNVX" type="enum">
+        <enum value="0" name="VK_INDIRECT_COMMANDS_TOKEN_PIPELINE_NVX"/>        <!-- array of 32bit tableEntry in the object table -->
+        <enum value="1" name="VK_INDIRECT_COMMANDS_TOKEN_DESCRIPTOR_SET_NVX"/>  <!-- array of (32 bit tableEntry + variable count 32bit offsets) -->
+        <enum value="2" name="VK_INDIRECT_COMMANDS_TOKEN_INDEX_BUFFER_NVX"/>    <!-- array of (32 bit tableEntry + optional 32bit offset) -->
+        <enum value="3" name="VK_INDIRECT_COMMANDS_TOKEN_VERTEX_BUFFER_NVX"/>   <!-- array of (32 bit tableEntry + optional 32bit offset) -->
+        <enum value="4" name="VK_INDIRECT_COMMANDS_TOKEN_PUSH_CONSTANT_NVX"/>   <!-- array of (32 bit tableEntry + variable count 32bit values ) -->
+        <enum value="5" name="VK_INDIRECT_COMMANDS_TOKEN_DRAW_INDEXED_NVX"/>    <!-- array of VkDrawIndexedIndirectCommand -->
+        <enum value="6" name="VK_INDIRECT_COMMANDS_TOKEN_DRAW_NVX"/>            <!-- array of VkDrawIndirectCommand -->
+        <enum value="7" name="VK_INDIRECT_COMMANDS_TOKEN_DISPATCH_NVX"/>        <!-- array of VkDispatchIndirectCommand -->
+    </enums>
+    <enums name="VkObjectEntryTypeNVX" type="enum">
+        <enum value="0" name="VK_OBJECT_ENTRY_DESCRIPTOR_SET_NVX"/>
+        <enum value="1" name="VK_OBJECT_ENTRY_PIPELINE_NVX"/>
+        <enum value="2" name="VK_OBJECT_ENTRY_INDEX_BUFFER_NVX"/>
+        <enum value="3" name="VK_OBJECT_ENTRY_VERTEX_BUFFER_NVX"/>
+        <enum value="4" name="VK_OBJECT_ENTRY_PUSH_CONSTANT_NVX"/>
+    </enums>
     <!-- SECTION: Vulkan command definitions -->
     <commands>
         <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED,VK_ERROR_LAYER_NOT_PRESENT,VK_ERROR_EXTENSION_NOT_PRESENT,VK_ERROR_INCOMPATIBLE_DRIVER">
@@ -3131,7 +3271,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param len="bindingCount">const <type>VkBuffer</type>* <name>pBuffers</name></param>
             <param len="bindingCount">const <type>VkDeviceSize</type>* <name>pOffsets</name></param>
         </command>
-        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
             <proto><type>void</type> <name>vkCmdDraw</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>uint32_t</type> <name>vertexCount</name></param>
@@ -3139,7 +3279,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>firstVertex</name></param>
             <param><type>uint32_t</type> <name>firstInstance</name></param>
         </command>
-        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
             <proto><type>void</type> <name>vkCmdDrawIndexed</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>uint32_t</type> <name>indexCount</name></param>
@@ -3148,7 +3288,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>int32_t</type> <name>vertexOffset</name></param>
             <param><type>uint32_t</type> <name>firstInstance</name></param>
         </command>
-        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
             <proto><type>void</type> <name>vkCmdDrawIndirect</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkBuffer</type> <name>buffer</name></param>
@@ -3156,7 +3296,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>drawCount</name></param>
             <param><type>uint32_t</type> <name>stride</name></param>
         </command>
-        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
             <proto><type>void</type> <name>vkCmdDrawIndexedIndirect</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkBuffer</type> <name>buffer</name></param>
@@ -3164,20 +3304,20 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>drawCount</name></param>
             <param><type>uint32_t</type> <name>stride</name></param>
         </command>
-        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="compute">
             <proto><type>void</type> <name>vkCmdDispatch</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>uint32_t</type> <name>x</name></param>
             <param><type>uint32_t</type> <name>y</name></param>
             <param><type>uint32_t</type> <name>z</name></param>
         </command>
-        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="compute">
             <proto><type>void</type> <name>vkCmdDispatchIndirect</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkBuffer</type> <name>buffer</name></param>
             <param><type>VkDeviceSize</type> <name>offset</name></param>
         </command>
-        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdCopyBuffer</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkBuffer</type> <name>srcBuffer</name></param>
@@ -3185,7 +3325,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>regionCount</name></param>
             <param len="regionCount">const <type>VkBufferCopy</type>* <name>pRegions</name></param>
         </command>
-        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdCopyImage</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkImage</type> <name>srcImage</name></param>
@@ -3195,7 +3335,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>regionCount</name></param>
             <param len="regionCount">const <type>VkImageCopy</type>* <name>pRegions</name></param>
         </command>
-        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdBlitImage</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkImage</type> <name>srcImage</name></param>
@@ -3206,7 +3346,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param len="regionCount">const <type>VkImageBlit</type>* <name>pRegions</name></param>
             <param><type>VkFilter</type> <name>filter</name></param>
         </command>
-        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdCopyBufferToImage</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkBuffer</type> <name>srcBuffer</name></param>
@@ -3215,7 +3355,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>regionCount</name></param>
             <param len="regionCount">const <type>VkBufferImageCopy</type>* <name>pRegions</name></param>
         </command>
-        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdCopyImageToBuffer</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkImage</type> <name>srcImage</name></param>
@@ -3224,7 +3364,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>regionCount</name></param>
             <param len="regionCount">const <type>VkBufferImageCopy</type>* <name>pRegions</name></param>
         </command>
-        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdUpdateBuffer</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkBuffer</type> <name>dstBuffer</name></param>
@@ -3232,7 +3372,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>VkDeviceSize</type> <name>dataSize</name></param>
             <param len="dataSize">const <type>void</type>* <name>pData</name></param>
         </command>
-        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdFillBuffer</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkBuffer</type> <name>dstBuffer</name></param>
@@ -3240,7 +3380,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>VkDeviceSize</type> <name>size</name></param>
             <param><type>uint32_t</type> <name>data</name></param>
         </command>
-        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdClearColorImage</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkImage</type> <name>image</name></param>
@@ -3249,7 +3389,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>rangeCount</name></param>
             <param len="rangeCount">const <type>VkImageSubresourceRange</type>* <name>pRanges</name></param>
         </command>
-        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdClearDepthStencilImage</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkImage</type> <name>image</name></param>
@@ -3258,7 +3398,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>rangeCount</name></param>
             <param len="rangeCount">const <type>VkImageSubresourceRange</type>* <name>pRanges</name></param>
         </command>
-        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
             <proto><type>void</type> <name>vkCmdClearAttachments</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>uint32_t</type> <name>attachmentCount</name></param>
@@ -3266,7 +3406,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>rectCount</name></param>
             <param len="rectCount">const <type>VkClearRect</type>* <name>pRects</name></param>
         </command>
-        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdResolveImage</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkImage</type> <name>srcImage</name></param>
@@ -3335,14 +3475,14 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>firstQuery</name></param>
             <param><type>uint32_t</type> <name>queryCount</name></param>
         </command>
-        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdWriteTimestamp</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkPipelineStageFlagBits</type> <name>pipelineStage</name></param>
             <param><type>VkQueryPool</type> <name>queryPool</name></param>
             <param><type>uint32_t</type> <name>query</name></param>
         </command>
-        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
             <proto><type>void</type> <name>vkCmdCopyQueryPoolResults</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkQueryPool</type> <name>queryPool</name></param>
@@ -3362,18 +3502,18 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>size</name></param>
             <param len="size">const <type>void</type>* <name>pValues</name></param>
         </command>
-        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary">
+        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary" pipeline="graphics">
             <proto><type>void</type> <name>vkCmdBeginRenderPass</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param>const <type>VkRenderPassBeginInfo</type>* <name>pRenderPassBegin</name></param>
             <param><type>VkSubpassContents</type> <name>contents</name></param>
         </command>
-        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary">
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary" pipeline="graphics">
             <proto><type>void</type> <name>vkCmdNextSubpass</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkSubpassContents</type> <name>contents</name></param>
         </command>
-        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary">
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary" pipeline="graphics">
             <proto><type>void</type> <name>vkCmdEndRenderPass</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
         </command>
@@ -3600,8 +3740,8 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint64_t</type> <name>object</name></param>
             <param><type>size_t</type> <name>location</name></param>
             <param><type>int32_t</type> <name>messageCode</name></param>
-            <param>const <type>char</type>* <name>pLayerPrefix</name></param>
-            <param>const <type>char</type>* <name>pMessage</name></param>
+            <param len="null-terminated">const <type>char</type>* <name>pLayerPrefix</name></param>
+            <param len="null-terminated">const <type>char</type>* <name>pMessage</name></param>
         </command>
         <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
             <proto><type>VkResult</type> <name>vkDebugMarkerSetObjectNameEXT</name></proto>
@@ -3634,8 +3774,8 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>VkImageType</type> <name>type</name></param>
             <param><type>VkImageTiling</type> <name>tiling</name></param>
             <param><type>VkImageUsageFlags</type> <name>usage</name></param>
-            <param><type>VkImageCreateFlags</type> <name>flags</name></param>
-            <param><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>externalHandleType</name></param>
+            <param optional="true"><type>VkImageCreateFlags</type> <name>flags</name></param>
+            <param optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>externalHandleType</name></param>
             <param><type>VkExternalImageFormatPropertiesNV</type>* <name>pExternalImageFormatProperties</name></param>
         </command>
         <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
@@ -3645,7 +3785,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>handleType</name></param>
             <param><type>HANDLE</type>* <name>pHandle</name></param>
         </command>
-        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
             <proto><type>void</type> <name>vkCmdDrawIndirectCountAMD</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkBuffer</type> <name>buffer</name></param>
@@ -3655,7 +3795,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>maxDrawCount</name></param>
             <param><type>uint32_t</type> <name>stride</name></param>
         </command>
-        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
             <proto><type>void</type> <name>vkCmdDrawIndexedIndirectCountAMD</name></proto>
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkBuffer</type> <name>buffer</name></param>
@@ -3665,6 +3805,64 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
             <param><type>uint32_t</type> <name>maxDrawCount</name></param>
             <param><type>uint32_t</type> <name>stride</name></param>
         </command>
+        <command queues="graphics,compute" renderpass="inside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdProcessCommandsNVX</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkCmdProcessCommandsInfoNVX</type>* <name>pProcessCommandsInfo</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="inside" cmdbufferlevel="secondary">
+            <proto><type>void</type> <name>vkCmdReserveSpaceForCommandsNVX</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkCmdReserveSpaceForCommandsInfoNVX</type>* <name>pReserveSpaceInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateIndirectCommandsLayoutNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkIndirectCommandsLayoutCreateInfoNVX</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkIndirectCommandsLayoutNVX</type>* <name>pIndirectCommandsLayout</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyIndirectCommandsLayoutNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkIndirectCommandsLayoutNVX</type> <name>indirectCommandsLayout</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateObjectTableNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkObjectTableCreateInfoNVX</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkObjectTableNVX</type>* <name>pObjectTable</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyObjectTableNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkObjectTableNVX</type> <name>objectTable</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkRegisterObjectsNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkObjectTableNVX</type> <name>objectTable</name></param>
+            <param><type>uint32_t</type> <name>objectCount</name></param>
+            <param len="objectCount">const <type>VkObjectTableEntryNVX</type>* const*    <name>ppObjectTableEntries</name></param>
+            <param len="objectCount">const <type>uint32_t</type>* <name>pObjectIndices</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkUnregisterObjectsNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkObjectTableNVX</type> <name>objectTable</name></param>
+            <param><type>uint32_t</type> <name>objectCount</name></param>
+            <param len="objectCount">const <type>VkObjectEntryTypeNVX</type>* <name>pObjectEntryTypes</name></param>
+            <param len="objectCount">const <type>uint32_t</type>* <name>pObjectIndices</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkDeviceGeneratedCommandsFeaturesNVX</type>* <name>pFeatures</name></param>
+            <param><type>VkDeviceGeneratedCommandsLimitsNVX</type>* <name>pLimits</name></param>
+        </command>
     </commands>
 
     <!-- SECTION: Vulkan API interface definitions -->
@@ -4037,7 +4235,7 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
         </extension>
         <extension name="VK_EXT_debug_report" number="12" type="instance" author="Google, Inc." contact="Courtney Goeltzenleuchter @courtney" supported="vulkan">
             <require>
-                <enum value="3"                                         name="VK_EXT_DEBUG_REPORT_SPEC_VERSION"/>
+                <enum value="4"                                         name="VK_EXT_DEBUG_REPORT_SPEC_VERSION"/>
                 <enum value="&quot;VK_EXT_debug_report&quot;"           name="VK_EXT_DEBUG_REPORT_EXTENSION_NAME"/>
                 <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT"/>
                 <enum offset="1" dir="-" extends="VkResult"             name="VK_ERROR_VALIDATION_FAILED_EXT"/>
@@ -4547,12 +4745,52 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
                  <enum value="&quot;VK_KHR_extension_86&quot;"           name="VK_KHR_EXTENSION_86_EXTENSION_NAME"/>
               </require>
          </extension>
-         <extension name="VK_NV_extension_87" number="87" author="NVIDIA" contact="Christoph Kubisch @pixeljetstream" supported="disabled">
-             <require>
-                 <enum value="0"                                         name="VK_NV_EXTENSION_87_SPEC_VERSION"/>
-                 <enum value="&quot;VK_NV_extension_87&quot;"            name="VK_NV_EXTENSION_87_EXTENSION_NAME"/>
-              </require>
-         </extension>
+        <extension name="VK_NVX_device_generated_commands" number="87" type="device" author="NVIDIA" contact="Christoph Kubisch @pixeljetstream" supported="vulkan">
+          <require>
+              <enum value="1"                                               name="VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION"/>
+              <enum value="&quot;VK_NVX_device_generated_commands&quot;"    name="VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME"/>
+              <enum offset="0" extends="VkStructureType"                    name="VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX"/>
+              <enum offset="1" extends="VkStructureType"                    name="VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX"/>
+              <enum offset="2" extends="VkStructureType"                    name="VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX"/>
+              <enum offset="3" extends="VkStructureType"                    name="VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX"/>
+              <enum offset="4" extends="VkStructureType"                    name="VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX"/>
+              <enum offset="5" extends="VkStructureType"                    name="VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX"/>
+              <enum bitpos="17" extends="VkPipelineStageFlagBits"           name="VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX"/>
+              <enum bitpos="17" extends="VkAccessFlagBits"                  name="VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX"/>
+              <enum bitpos="18" extends="VkAccessFlagBits"                  name="VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX"/>
+              <type name="VkObjectTableNVX"/>
+              <type name="VkIndirectCommandsLayoutNVX"/>
+              <type name="VkIndirectCommandsLayoutUsageFlagsNVX"/>
+              <type name="VkObjectEntryUsageFlagsNVX"/>
+              <type name="VkIndirectCommandsLayoutUsageFlagBitsNVX"/>
+              <type name="VkIndirectCommandsTokenTypeNVX"/>
+              <type name="VkObjectEntryUsageFlagBitsNVX"/>
+              <type name="VkObjectEntryTypeNVX"/>
+              <type name="VkDeviceGeneratedCommandsFeaturesNVX"/>
+              <type name="VkDeviceGeneratedCommandsLimitsNVX"/>
+              <type name="VkIndirectCommandsTokenNVX"/>
+              <type name="VkIndirectCommandsLayoutTokenNVX"/>
+              <type name="VkIndirectCommandsLayoutCreateInfoNVX"/>
+              <type name="VkCmdProcessCommandsInfoNVX"/>
+              <type name="VkCmdReserveSpaceForCommandsInfoNVX"/>
+              <type name="VkObjectTableCreateInfoNVX"/>
+              <type name="VkObjectTableEntryNVX"/>
+              <type name="VkObjectTablePipelineEntryNVX"/>
+              <type name="VkObjectTableDescriptorSetEntryNVX"/>
+              <type name="VkObjectTableVertexBufferEntryNVX"/>
+              <type name="VkObjectTableIndexBufferEntryNVX"/>
+              <type name="VkObjectTablePushConstantEntryNVX"/>
+              <command name="vkCmdProcessCommandsNVX"/>
+              <command name="vkCmdReserveSpaceForCommandsNVX"/>
+              <command name="vkCreateIndirectCommandsLayoutNVX"/>
+              <command name="vkDestroyIndirectCommandsLayoutNVX"/>
+              <command name="vkCreateObjectTableNVX"/>
+              <command name="vkDestroyObjectTableNVX"/>
+              <command name="vkRegisterObjectsNVX"/>
+              <command name="vkUnregisterObjectsNVX"/>
+              <command name="vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX"/>
+          </require>
+        </extension>
          <extension name="VK_KHR_extension_88" number="88" author="NV" contact="Eric Werness @ewerness" supported="disabled">
              <require>
                  <enum value="0"                                         name="VK_KHR_EXTENSION_88_SPEC_VERSION"/>
@@ -4577,5 +4815,89 @@ maintained in the master branch of the Khronos Vulkan GitHub project.
                  <enum value="&quot;VK_EXT_extension_91&quot;"           name="VK_EXT_EXTENSION_91_EXTENSION_NAME"/>
               </require>
          </extension>
+         <extension name="VK_EXT_extension_92" number="92" author="NV" contact="James Jones @cubanismo" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_EXT_EXTENSION_92_SPEC_VERSION"/>
+                 <enum value="&quot;VK_EXT_extension_92&quot;"           name="VK_EXT_EXTENSION_92_EXTENSION_NAME"/>
+              </require>
+         </extension>
+         <extension name="VK_KHR_extension_93" number="93" author="GOOGLE" contact="Ian Elliott @ianelliott" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_GOOGLE_EXTENSION_93_SPEC_VERSION"/>
+                 <enum value="&quot;VK_GOOGLE_extension_93&quot;"        name="VK_GOOGLE_EXTENSION_93_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_KHR_extension_94" number="94" author="Codeplay" contact="Neil Henning @neil_henning" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_KHR_EXTENSION_94_SPEC_VERSION"/>
+                 <enum value="&quot;VK_KHR_extension_94&quot;"           name="VK_KHR_EXTENSION_94_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_NV_extension_95" number="95" author="NVIDIA" contact="Daniel Koch @dgkoch" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_NV_EXTENSION_95_SPEC_VERSION"/>
+                 <enum value="&quot;VK_NV_extension_95&quot;"            name="VK_NV_EXTENSION_95_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_NV_extension_96" number="96" author="NVIDIA" contact="Daniel Koch @dgkoch" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_NV_EXTENSION_96_SPEC_VERSION"/>
+                 <enum value="&quot;VK_NV_extension_96&quot;"            name="VK_NV_EXTENSION_96_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_NV_extension_97" number="97" author="NVIDIA" contact="Daniel Koch @dgkoch" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_NV_EXTENSION_97_SPEC_VERSION"/>
+                 <enum value="&quot;VK_NV_extension_97&quot;"            name="VK_NV_EXTENSION_97_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_NV_extension_98" number="98" author="NVIDIA" contact="Daniel Koch @dgkoch" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_NV_EXTENSION_98_SPEC_VERSION"/>
+                 <enum value="&quot;VK_NV_extension_98&quot;"            name="VK_NV_EXTENSION_98_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_NV_extension_99" number="99" author="NVIDIA" contact="Daniel Koch @dgkoch" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_NV_EXTENSION_99_SPEC_VERSION"/>
+                 <enum value="&quot;VK_NV_extension_99&quot;"            name="VK_NV_EXTENSION_99_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_NV_extension_100" number="100" author="NVIDIA" contact="Daniel Koch @dgkoch" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_NV_EXTENSION_100_SPEC_VERSION"/>
+                 <enum value="&quot;VK_NV_extension_100&quot;"           name="VK_NV_EXTENSION_100_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_NV_extension_101" number="101" author="NVIDIA" contact="Daniel Koch @dgkoch" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_NV_EXTENSION_101_SPEC_VERSION"/>
+                 <enum value="&quot;VK_NV_extension_101&quot;"           name="VK_NV_EXTENSION_101_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_NV_extension_102" number="102" author="NVIDIA" contact="Daniel Koch @dgkoch" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_NV_EXTENSION_102_SPEC_VERSION"/>
+                 <enum value="&quot;VK_NV_extension_102&quot;"           name="VK_NV_EXTENSION_102_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_NV_extension_103" number="103" author="NVIDIA" contact="Daniel Koch @dgkoch" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_NV_EXTENSION_103_SPEC_VERSION"/>
+                 <enum value="&quot;VK_NV_extension_103&quot;"           name="VK_NV_EXTENSION_103_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_NV_extension_104" number="104" author="NVIDIA" contact="Mathias Schott @mschott" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_NV_EXTENSION_104_SPEC_VERSION"/>
+                 <enum value="&quot;VK_NV_extension_104&quot;"           name="VK_NV_EXTENSION_104_EXTENSION_NAME"/>
+             </require>
+         </extension>
+         <extension name="VK_EXT_extension_105" number="105" author="GOOGLE" contact="Courtney Goeltzenleuchter @courtneygo" supported="disabled">
+             <require>
+                 <enum value="0"                                         name="VK_KHR_EXTENSION_105_SPEC_VERSION"/>
+                 <enum value="&quot;VK_EXT_extension_105&quot;"           name="VK_KHR_EXTENSION_105_EXTENSION_NAME"/>
+             </require>
+         </extension>
     </extensions>
 </registry>
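For reference, a minimal C sketch (not a definitive implementation) of querying the device-generated-commands features and limits exposed by the vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX command registered above. It assumes `instance` and `physicalDevice` are valid handles and that the loader/ICD expose the extension, so the entry point is fetched through vkGetInstanceProcAddr:

/* Sketch only: assumes VK_NVX_device_generated_commands is available. */
#include <vulkan/vulkan.h>
#include <stdio.h>

static void query_generated_commands_properties(VkInstance instance,
                                                VkPhysicalDevice physicalDevice)
{
    PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX pfnGetProps =
        (PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)vkGetInstanceProcAddr(
            instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX");
    if (!pfnGetProps)
        return; /* extension entry point not exposed by this loader/ICD */

    VkDeviceGeneratedCommandsFeaturesNVX features = {0};
    features.sType = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX;

    VkDeviceGeneratedCommandsLimitsNVX limits = {0};
    limits.sType = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX;

    pfnGetProps(physicalDevice, &features, &limits);

    printf("computeBindingPointSupport: %u\n", features.computeBindingPointSupport);
    printf("maxIndirectCommandsLayoutTokenCount: %u\n",
           limits.maxIndirectCommandsLayoutTokenCount);
    printf("maxObjectEntryCounts: %u\n", limits.maxObjectEntryCounts);
}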
index 5d71f52..65728c7 100644 (file)
@@ -1344,6 +1344,66 @@ VK_EXT_debug_marker = Extension(
     ],
 )
 
+VK_NVX_device_generated_commands = Extension(
+    name="VK_NVX_device_generated_commands",
+    headers=["vulkan/vulkan.h"],
+    objects=[
+        "VkObjectTableNVX",
+        "VkIndirectCommandsLayoutNVX",
+    ],
+    protos=[
+        Proto("void", "CmdProcessCommandsNVX",
+            [Param("VkCommandBuffer", "commandBuffer"),
+             Param("VkCmdProcessCommandsInfoNVX*", "pProcessCommandsInfo")]),
+
+        Proto("void", "CmdReserveSpaceForCommandsNV",
+            [Param("VkCommandBuffer", "commandBuffer"),
+             Param("VkCmdReserveSpaceForCommandsInfoNVX*", "pReserveSpaceInfo")]),
+
+        Proto("VkResult", "CreateIndirectCommandsLayoutNVX",
+            [Param("VkDevice", "device"),
+             Param("VkIndirectCommandsLayoutCreateInfoNVX*", "pCreateInfo"),
+             Param("VkAllocationCallbacks*", "pAllocator"),
+             Param("VkIndirectCommandsLayoutNVX*", "pIndirectCommandsLayout")]),
+
+        Proto("void", "DestroyIndirectCommandsLayoutNVX",
+            [Param("VkDevice", "device"),
+             Param("VkIndirectCommandsLayoutNVX", "indirectCommandsLayout"),
+             Param("VkAllocationCallbacks*", "pAllocator")]),
+
+        Proto("VkResult", "CreateObjectTableNVX)",
+            [Param("VkDevice", "device"),
+             Param("VkObjectTableCreateInfoNVX*", "pCreateInfo"),
+             Param("VkAllocationCallbacks*", "pAllocator"),
+             Param("VkObjectTableNVX*", "pObjectTable")]),
+
+        Proto("void", "DestroyObjectTableNVX",
+            [Param("VkDevice", "device"),
+             Param("VkObjectTableNVX", "objectTable"),
+             Param("VkAllocationCallbacks*", "pAllocator")]),
+
+        Proto("VkResult", "RegisterObjectsNVX",
+            [Param("VkDevice", "device"),
+             Param("VkObjectTableNVX", "objectTable"),
+             Param("uint32_t", "objectCount"),
+             Param("VkObjectTableEntryNVX**", "ppObjectTableEntries"),
+             Param("uint32_t*", "pObjectIndices")]),
+
+        Proto("VkResult", "UnregisterObjectsNVX)",
+            [Param("VkDevice", "device"),
+             Param("VkObjectTableNVX*", "objectTable"),
+             Param("uint32_t*", "objectCount"),
+             Param("VkObjectEntryTypeNVX*", "pObjectEntryTypes"),
+             Param("uint32_t*", "pObjectIndices")]),
+
+        Proto("void", "GetPhysicalDeviceGeneratedCommandsPropertiesNVX",
+            [Param("VkPhysicalDevice", "physicalDevice"),
+             Param("VkDeviceGeneratedCommandsFeaturesNVX*", "pFeatures"),
+             Param("VkDeviceGeneratedCommandsLimitsNVX*", "pLimits")]),
+    ],
+)
+
+
 import sys
 
 wsi_linux = ['Xcb', 'Xlib', 'Wayland', 'Mir', 'Display']
@@ -1398,6 +1458,7 @@ non_exported_exts = [VK_NV_external_memory_capabilities,
 #                    VK_AMD_gpu_shader_half_float,
 #                    VK_AMD_shader_ballot,
 #                    VK_IMG_format_pvrtc,
+#                    VK_NVX_device_generated_commands,
                     ]
 
 extensions = common_exts
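
As a closing reference, a minimal C sketch of the object-table flow covered by the CreateObjectTableNVX/RegisterObjectsNVX protos above. This is a sketch under the assumptions that `device` has VK_NVX_device_generated_commands enabled and `pipeline` is an existing graphics pipeline; the commands are extension entry points, so they are fetched with vkGetDeviceProcAddr:

/* Sketch only: error handling is reduced to early returns. */
static VkObjectTableNVX create_single_pipeline_table(VkDevice device, VkPipeline pipeline)
{
    PFN_vkCreateObjectTableNVX pfnCreateTable =
        (PFN_vkCreateObjectTableNVX)vkGetDeviceProcAddr(device, "vkCreateObjectTableNVX");
    PFN_vkRegisterObjectsNVX pfnRegister =
        (PFN_vkRegisterObjectsNVX)vkGetDeviceProcAddr(device, "vkRegisterObjectsNVX");
    if (!pfnCreateTable || !pfnRegister)
        return VK_NULL_HANDLE;

    /* One pipeline slot, usable from graphics binding points. */
    VkObjectEntryTypeNVX entryType = VK_OBJECT_ENTRY_PIPELINE_NVX;
    uint32_t entryCount = 1;
    VkObjectEntryUsageFlagsNVX entryUsage = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX;

    VkObjectTableCreateInfoNVX createInfo = {0};
    createInfo.sType = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX;
    createInfo.objectCount = 1;
    createInfo.pObjectEntryTypes = &entryType;
    createInfo.pObjectEntryCounts = &entryCount;
    createInfo.pObjectEntryUsageFlags = &entryUsage;
    createInfo.maxPipelineLayouts = 1;

    VkObjectTableNVX table = VK_NULL_HANDLE;
    if (pfnCreateTable(device, &createInfo, NULL, &table) != VK_SUCCESS)
        return VK_NULL_HANDLE;

    /* Register the pipeline at table index 0. */
    VkObjectTablePipelineEntryNVX pipelineEntry = {0};
    pipelineEntry.type = VK_OBJECT_ENTRY_PIPELINE_NVX;
    pipelineEntry.flags = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX;
    pipelineEntry.pipeline = pipeline;

    const VkObjectTableEntryNVX *entries[] = { (const VkObjectTableEntryNVX *)&pipelineEntry };
    uint32_t objectIndex = 0;
    pfnRegister(device, table, 1, entries, &objectIndex);

    return table;
}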