Update for Vulkan-Docs 1.2.170
author    Jon Leech <oddhack@sonic.net>
Mon, 15 Feb 2021 08:55:18 +0000 (00:55 -0800)
committer    Jon Leech <devrel@oddhack.org>
Mon, 15 Feb 2021 08:56:59 +0000 (00:56 -0800)
include/vulkan/vulkan.hpp
include/vulkan/vulkan_core.h
registry/generator.py
registry/validusage.json
registry/vk.xml
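
For context, the headline addition in this update is the VK_KHR_synchronization2 machinery visible in the vulkan.hpp diff below: the vkCmdPipelineBarrier2KHR / vkCmdSetEvent2KHR / vkQueueSubmit2KHR dispatcher entries, the 64-bit PipelineStageFlags2KHR and AccessFlags2KHR wrappers, and the *2KHR structure types. A minimal usage sketch of the new barrier call, assuming a command buffer in the recording state, a valid VkBuffer, and a device with the synchronization2 feature enabled; the VkDependencyInfoKHR member names follow the extension specification rather than anything shown in this diff:

    #include <vulkan/vulkan.h>

    void recordBufferBarrier2( VkCommandBuffer cmd, VkBuffer buf )
    {
      // Stage and access masks now live in the barrier struct itself instead of
      // being passed as separate vkCmdPipelineBarrier parameters.
      VkBufferMemoryBarrier2KHR barrier{};
      barrier.sType               = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR;
      barrier.srcStageMask        = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
      barrier.srcAccessMask       = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
      barrier.dstStageMask        = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
      barrier.dstAccessMask       = VK_ACCESS_2_SHADER_READ_BIT_KHR;
      barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
      barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
      barrier.buffer              = buf;
      barrier.offset              = 0;
      barrier.size                = VK_WHOLE_SIZE;

      VkDependencyInfoKHR depInfo{};
      depInfo.sType                    = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR;
      depInfo.bufferMemoryBarrierCount = 1;
      depInfo.pBufferMemoryBarriers    = &barrier;

      vkCmdPipelineBarrier2KHR( cmd, &depInfo );
    }
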

index 08816ab..12520bd 100644
@@ -94,7 +94,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
 #endif
 
 
-static_assert( VK_HEADER_VERSION ==  169 , "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION ==  170 , "Wrong VK_HEADER_VERSION!" );
 
 // 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
 // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -1742,6 +1742,11 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
     }
 
+    void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo );
+    }
+
     void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo );
@@ -1767,6 +1772,11 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkCmdResetEvent( commandBuffer, event, stageMask );
     }
 
+    void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask );
+    }
+
     void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
@@ -1852,6 +1862,11 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkCmdSetEvent( commandBuffer, event, stageMask );
     }
 
+    void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfoKHR* pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo );
+    }
+
     void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
@@ -1992,6 +2007,11 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
     }
 
+    void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfoKHR* pDependencyInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos );
+    }
+
     void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
@@ -2002,6 +2022,11 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
     }
 
+    void vkCmdWriteBufferMarker2AMD( VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker );
+    }
+
     void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker );
@@ -2012,6 +2037,11 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
     }
 
+    void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query );
+    }
+
     VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkCompileDeferredNV( device, pipeline, shader );
@@ -3217,6 +3247,11 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
     }
 
+    void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData );
+    }
+
     void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
@@ -3397,6 +3432,11 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
     }
 
+    VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence );
+    }
+
     VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkQueueWaitIdle( queue );
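
    // A minimal sketch of the new submission path added above (vkQueueSubmit2KHR),
    // assuming valid queue/command-buffer/semaphore/fence handles and the
    // synchronization2 feature enabled. The VkSubmitInfo2KHR,
    // VkCommandBufferSubmitInfoKHR and VkSemaphoreSubmitInfoKHR member names are
    // taken from the extension specification, not from this diff.
    VkResult submitOne( VkQueue queue, VkCommandBuffer cmd, VkSemaphore sem, VkFence fence )
    {
      // One command buffer in the batch.
      VkCommandBufferSubmitInfoKHR cmdInfo{};
      cmdInfo.sType         = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR;
      cmdInfo.commandBuffer = cmd;

      // Signal a binary semaphore once color-attachment writes have completed.
      VkSemaphoreSubmitInfoKHR signalInfo{};
      signalInfo.sType     = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR;
      signalInfo.semaphore = sem;
      signalInfo.stageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR;

      VkSubmitInfo2KHR submit{};
      submit.sType                    = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR;
      submit.commandBufferInfoCount   = 1;
      submit.pCommandBufferInfos      = &cmdInfo;
      submit.signalSemaphoreInfoCount = 1;
      submit.pSignalSemaphoreInfos    = &signalInfo;

      return vkQueueSubmit2KHR( queue, 1, &submit, fence );
    }
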
@@ -3924,6 +3964,7 @@ namespace VULKAN_HPP_NAMESPACE
     eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
     eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV,
     eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV,
+    eNoneKHR = VK_ACCESS_NONE_KHR,
     eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
     eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
     eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR};
@@ -3960,6 +4001,85 @@ namespace VULKAN_HPP_NAMESPACE
       case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT";
       case AccessFlagBits::eCommandPreprocessReadNV : return "CommandPreprocessReadNV";
       case AccessFlagBits::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV";
+      case AccessFlagBits::eNoneKHR : return "NoneKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class AccessFlagBits2KHR : VkAccessFlags2KHR
+  {
+    e2None = VK_ACCESS_2_NONE_KHR,
+    e2IndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR,
+    e2IndexRead = VK_ACCESS_2_INDEX_READ_BIT_KHR,
+    e2VertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR,
+    e2UniformRead = VK_ACCESS_2_UNIFORM_READ_BIT_KHR,
+    e2InputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR,
+    e2ShaderRead = VK_ACCESS_2_SHADER_READ_BIT_KHR,
+    e2ShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT_KHR,
+    e2ColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR,
+    e2ColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR,
+    e2DepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR,
+    e2DepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR,
+    e2TransferRead = VK_ACCESS_2_TRANSFER_READ_BIT_KHR,
+    e2TransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR,
+    e2HostRead = VK_ACCESS_2_HOST_READ_BIT_KHR,
+    e2HostWrite = VK_ACCESS_2_HOST_WRITE_BIT_KHR,
+    e2MemoryRead = VK_ACCESS_2_MEMORY_READ_BIT_KHR,
+    e2MemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT_KHR,
+    e2ShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR,
+    e2ShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR,
+    e2ShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR,
+    e2TransformFeedbackWriteExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+    e2TransformFeedbackCounterReadExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+    e2TransformFeedbackCounterWriteExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+    e2ConditionalRenderingReadExt = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT,
+    e2CommandPreprocessReadNv = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV,
+    e2CommandPreprocessWriteNv = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV,
+    e2FragmentShadingRateAttachmentRead = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+    e2AccelerationStructureRead = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    e2AccelerationStructureWrite = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    e2FragmentDensityMapReadExt = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+    e2ColorAttachmentReadNoncoherentExt = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+    e2AccelerationStructureReadNv = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
+    e2AccelerationStructureWriteNv = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+    e2ShadingRateImageReadNv = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV};
+
+  VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2KHR value )
+  {
+    switch ( value )
+    {
+      case AccessFlagBits2KHR::e2None : return "2None";
+      case AccessFlagBits2KHR::e2IndirectCommandRead : return "2IndirectCommandRead";
+      case AccessFlagBits2KHR::e2IndexRead : return "2IndexRead";
+      case AccessFlagBits2KHR::e2VertexAttributeRead : return "2VertexAttributeRead";
+      case AccessFlagBits2KHR::e2UniformRead : return "2UniformRead";
+      case AccessFlagBits2KHR::e2InputAttachmentRead : return "2InputAttachmentRead";
+      case AccessFlagBits2KHR::e2ShaderRead : return "2ShaderRead";
+      case AccessFlagBits2KHR::e2ShaderWrite : return "2ShaderWrite";
+      case AccessFlagBits2KHR::e2ColorAttachmentRead : return "2ColorAttachmentRead";
+      case AccessFlagBits2KHR::e2ColorAttachmentWrite : return "2ColorAttachmentWrite";
+      case AccessFlagBits2KHR::e2DepthStencilAttachmentRead : return "2DepthStencilAttachmentRead";
+      case AccessFlagBits2KHR::e2DepthStencilAttachmentWrite : return "2DepthStencilAttachmentWrite";
+      case AccessFlagBits2KHR::e2TransferRead : return "2TransferRead";
+      case AccessFlagBits2KHR::e2TransferWrite : return "2TransferWrite";
+      case AccessFlagBits2KHR::e2HostRead : return "2HostRead";
+      case AccessFlagBits2KHR::e2HostWrite : return "2HostWrite";
+      case AccessFlagBits2KHR::e2MemoryRead : return "2MemoryRead";
+      case AccessFlagBits2KHR::e2MemoryWrite : return "2MemoryWrite";
+      case AccessFlagBits2KHR::e2ShaderSampledRead : return "2ShaderSampledRead";
+      case AccessFlagBits2KHR::e2ShaderStorageRead : return "2ShaderStorageRead";
+      case AccessFlagBits2KHR::e2ShaderStorageWrite : return "2ShaderStorageWrite";
+      case AccessFlagBits2KHR::e2TransformFeedbackWriteExt : return "2TransformFeedbackWriteExt";
+      case AccessFlagBits2KHR::e2TransformFeedbackCounterReadExt : return "2TransformFeedbackCounterReadExt";
+      case AccessFlagBits2KHR::e2TransformFeedbackCounterWriteExt : return "2TransformFeedbackCounterWriteExt";
+      case AccessFlagBits2KHR::e2ConditionalRenderingReadExt : return "2ConditionalRenderingReadExt";
+      case AccessFlagBits2KHR::e2CommandPreprocessReadNv : return "2CommandPreprocessReadNv";
+      case AccessFlagBits2KHR::e2CommandPreprocessWriteNv : return "2CommandPreprocessWriteNv";
+      case AccessFlagBits2KHR::e2FragmentShadingRateAttachmentRead : return "2FragmentShadingRateAttachmentRead";
+      case AccessFlagBits2KHR::e2AccelerationStructureRead : return "2AccelerationStructureRead";
+      case AccessFlagBits2KHR::e2AccelerationStructureWrite : return "2AccelerationStructureWrite";
+      case AccessFlagBits2KHR::e2FragmentDensityMapReadExt : return "2FragmentDensityMapReadExt";
+      case AccessFlagBits2KHR::e2ColorAttachmentReadNoncoherentExt : return "2ColorAttachmentReadNoncoherentExt";
       default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }
@@ -5255,6 +5375,19 @@ namespace VULKAN_HPP_NAMESPACE
     }
   }
 
+  enum class EventCreateFlagBits : VkEventCreateFlags
+  {
+    eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR};
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case EventCreateFlagBits::eDeviceOnlyKHR : return "DeviceOnlyKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
   enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
   {
     eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
@@ -6354,6 +6487,8 @@ namespace VULKAN_HPP_NAMESPACE
     eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
     eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
     eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
+    eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR,
+    eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR,
     eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
     eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
     eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
@@ -6385,6 +6520,8 @@ namespace VULKAN_HPP_NAMESPACE
       case ImageLayout::eSharedPresentKHR : return "SharedPresentKHR";
       case ImageLayout::eShadingRateOptimalNV : return "ShadingRateOptimalNV";
       case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT";
+      case ImageLayout::eReadOnlyOptimalKHR : return "ReadOnlyOptimalKHR";
+      case ImageLayout::eAttachmentOptimalKHR : return "AttachmentOptimalKHR";
       default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }
@@ -7210,6 +7347,7 @@ namespace VULKAN_HPP_NAMESPACE
     eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
     eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
     eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
+    eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR,
     eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
     eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
     eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV};
@@ -7244,6 +7382,90 @@ namespace VULKAN_HPP_NAMESPACE
       case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV";
       case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
       case PipelineStageFlagBits::eCommandPreprocessNV : return "CommandPreprocessNV";
+      case PipelineStageFlagBits::eNoneKHR : return "NoneKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class PipelineStageFlagBits2KHR : VkPipelineStageFlags2KHR
+  {
+    e2None = VK_PIPELINE_STAGE_2_NONE_KHR,
+    e2TopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
+    e2DrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR,
+    e2VertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR,
+    e2VertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR,
+    e2TessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR,
+    e2TessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR,
+    e2GeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR,
+    e2FragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR,
+    e2EarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR,
+    e2LateFragmentTests = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR,
+    e2ColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR,
+    e2ComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR,
+    e2AllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR,
+    e2BottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR,
+    e2Host = VK_PIPELINE_STAGE_2_HOST_BIT_KHR,
+    e2AllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR,
+    e2AllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR,
+    e2Copy = VK_PIPELINE_STAGE_2_COPY_BIT_KHR,
+    e2Resolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR,
+    e2Blit = VK_PIPELINE_STAGE_2_BLIT_BIT_KHR,
+    e2Clear = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR,
+    e2IndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR,
+    e2VertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR,
+    e2PreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR,
+    e2TransformFeedbackExt = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,
+    e2ConditionalRenderingExt = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
+    e2CommandPreprocessNv = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV,
+    e2FragmentShadingRateAttachment = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    e2AccelerationStructureBuild = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    e2RayTracingShader = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR,
+    e2FragmentDensityProcessExt = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+    e2TaskShaderNv = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
+    e2MeshShaderNv = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
+    e2AccelerationStructureBuildNv = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    e2RayTracingShaderNv = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
+    e2ShadingRateImageNv = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,
+    e2Transfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits2KHR value )
+  {
+    switch ( value )
+    {
+      case PipelineStageFlagBits2KHR::e2None : return "2None";
+      case PipelineStageFlagBits2KHR::e2TopOfPipe : return "2TopOfPipe";
+      case PipelineStageFlagBits2KHR::e2DrawIndirect : return "2DrawIndirect";
+      case PipelineStageFlagBits2KHR::e2VertexInput : return "2VertexInput";
+      case PipelineStageFlagBits2KHR::e2VertexShader : return "2VertexShader";
+      case PipelineStageFlagBits2KHR::e2TessellationControlShader : return "2TessellationControlShader";
+      case PipelineStageFlagBits2KHR::e2TessellationEvaluationShader : return "2TessellationEvaluationShader";
+      case PipelineStageFlagBits2KHR::e2GeometryShader : return "2GeometryShader";
+      case PipelineStageFlagBits2KHR::e2FragmentShader : return "2FragmentShader";
+      case PipelineStageFlagBits2KHR::e2EarlyFragmentTests : return "2EarlyFragmentTests";
+      case PipelineStageFlagBits2KHR::e2LateFragmentTests : return "2LateFragmentTests";
+      case PipelineStageFlagBits2KHR::e2ColorAttachmentOutput : return "2ColorAttachmentOutput";
+      case PipelineStageFlagBits2KHR::e2ComputeShader : return "2ComputeShader";
+      case PipelineStageFlagBits2KHR::e2AllTransfer : return "2AllTransfer";
+      case PipelineStageFlagBits2KHR::e2BottomOfPipe : return "2BottomOfPipe";
+      case PipelineStageFlagBits2KHR::e2Host : return "2Host";
+      case PipelineStageFlagBits2KHR::e2AllGraphics : return "2AllGraphics";
+      case PipelineStageFlagBits2KHR::e2AllCommands : return "2AllCommands";
+      case PipelineStageFlagBits2KHR::e2Copy : return "2Copy";
+      case PipelineStageFlagBits2KHR::e2Resolve : return "2Resolve";
+      case PipelineStageFlagBits2KHR::e2Blit : return "2Blit";
+      case PipelineStageFlagBits2KHR::e2Clear : return "2Clear";
+      case PipelineStageFlagBits2KHR::e2IndexInput : return "2IndexInput";
+      case PipelineStageFlagBits2KHR::e2VertexAttributeInput : return "2VertexAttributeInput";
+      case PipelineStageFlagBits2KHR::e2PreRasterizationShaders : return "2PreRasterizationShaders";
+      case PipelineStageFlagBits2KHR::e2TransformFeedbackExt : return "2TransformFeedbackExt";
+      case PipelineStageFlagBits2KHR::e2ConditionalRenderingExt : return "2ConditionalRenderingExt";
+      case PipelineStageFlagBits2KHR::e2CommandPreprocessNv : return "2CommandPreprocessNv";
+      case PipelineStageFlagBits2KHR::e2FragmentShadingRateAttachment : return "2FragmentShadingRateAttachment";
+      case PipelineStageFlagBits2KHR::e2AccelerationStructureBuild : return "2AccelerationStructureBuild";
+      case PipelineStageFlagBits2KHR::e2RayTracingShader : return "2RayTracingShader";
+      case PipelineStageFlagBits2KHR::e2FragmentDensityProcessExt : return "2FragmentDensityProcessExt";
+      case PipelineStageFlagBits2KHR::e2TaskShaderNv : return "2TaskShaderNv";
+      case PipelineStageFlagBits2KHR::e2MeshShaderNv : return "2MeshShaderNv";
       default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }
@@ -8636,6 +8858,16 @@ namespace VULKAN_HPP_NAMESPACE
     ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
     ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
     eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
+    eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR,
+    eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR,
+    eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR,
+    eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
+    eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR,
+    eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
+    eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR,
+    ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR,
+    eQueueFamilyCheckpointProperties2Nv = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV,
+    eCheckpointData2Nv = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV,
     ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR,
     ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV,
     ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV,
@@ -9307,6 +9539,16 @@ namespace VULKAN_HPP_NAMESPACE
       case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT : return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT";
       case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV : return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
       case StructureType::eDeviceDiagnosticsConfigCreateInfoNV : return "DeviceDiagnosticsConfigCreateInfoNV";
+      case StructureType::eMemoryBarrier2KHR : return "MemoryBarrier2KHR";
+      case StructureType::eBufferMemoryBarrier2KHR : return "BufferMemoryBarrier2KHR";
+      case StructureType::eImageMemoryBarrier2KHR : return "ImageMemoryBarrier2KHR";
+      case StructureType::eDependencyInfoKHR : return "DependencyInfoKHR";
+      case StructureType::eSubmitInfo2KHR : return "SubmitInfo2KHR";
+      case StructureType::eSemaphoreSubmitInfoKHR : return "SemaphoreSubmitInfoKHR";
+      case StructureType::eCommandBufferSubmitInfoKHR : return "CommandBufferSubmitInfoKHR";
+      case StructureType::ePhysicalDeviceSynchronization2FeaturesKHR : return "PhysicalDeviceSynchronization2FeaturesKHR";
+      case StructureType::eQueueFamilyCheckpointProperties2Nv : return "QueueFamilyCheckpointProperties2Nv";
+      case StructureType::eCheckpointData2Nv : return "CheckpointData2Nv";
       case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR : return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR";
       case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV : return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV";
       case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV : return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV";
@@ -9366,6 +9608,19 @@ namespace VULKAN_HPP_NAMESPACE
     }
   }
 
+  enum class SubmitFlagBitsKHR : VkSubmitFlagsKHR
+  {
+    eProtected = VK_SUBMIT_PROTECTED_BIT_KHR};
+
+  VULKAN_HPP_INLINE std::string to_string( SubmitFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case SubmitFlagBitsKHR::eProtected : return "Protected";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
   enum class SubpassContents
   {
     eInline = VK_SUBPASS_CONTENTS_INLINE,
@@ -9794,7 +10049,8 @@ VkFlags(AccessFlagBits::eAccelerationStructureWriteKHR) |
 VkFlags(AccessFlagBits::eShadingRateImageReadNV) |
 VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) |
 VkFlags(AccessFlagBits::eCommandPreprocessReadNV) |
-VkFlags(AccessFlagBits::eCommandPreprocessWriteNV)
+VkFlags(AccessFlagBits::eCommandPreprocessWriteNV) |
+VkFlags(AccessFlagBits::eNoneKHR)
     };
   };
 
@@ -9856,6 +10112,108 @@ VkFlags(AccessFlagBits::eCommandPreprocessWriteNV)
   }
 
 
+  using AccessFlags2KHR = Flags<AccessFlagBits2KHR>;
+
+  template <> struct FlagTraits<AccessFlagBits2KHR>
+  {
+    enum : VkFlags64
+    {
+      allFlags = VkFlags64(AccessFlagBits2KHR::e2None) |
+VkFlags64(AccessFlagBits2KHR::e2IndirectCommandRead) |
+VkFlags64(AccessFlagBits2KHR::e2IndexRead) |
+VkFlags64(AccessFlagBits2KHR::e2VertexAttributeRead) |
+VkFlags64(AccessFlagBits2KHR::e2UniformRead) |
+VkFlags64(AccessFlagBits2KHR::e2InputAttachmentRead) |
+VkFlags64(AccessFlagBits2KHR::e2ShaderRead) |
+VkFlags64(AccessFlagBits2KHR::e2ShaderWrite) |
+VkFlags64(AccessFlagBits2KHR::e2ColorAttachmentRead) |
+VkFlags64(AccessFlagBits2KHR::e2ColorAttachmentWrite) |
+VkFlags64(AccessFlagBits2KHR::e2DepthStencilAttachmentRead) |
+VkFlags64(AccessFlagBits2KHR::e2DepthStencilAttachmentWrite) |
+VkFlags64(AccessFlagBits2KHR::e2TransferRead) |
+VkFlags64(AccessFlagBits2KHR::e2TransferWrite) |
+VkFlags64(AccessFlagBits2KHR::e2HostRead) |
+VkFlags64(AccessFlagBits2KHR::e2HostWrite) |
+VkFlags64(AccessFlagBits2KHR::e2MemoryRead) |
+VkFlags64(AccessFlagBits2KHR::e2MemoryWrite) |
+VkFlags64(AccessFlagBits2KHR::e2ShaderSampledRead) |
+VkFlags64(AccessFlagBits2KHR::e2ShaderStorageRead) |
+VkFlags64(AccessFlagBits2KHR::e2ShaderStorageWrite) |
+VkFlags64(AccessFlagBits2KHR::e2TransformFeedbackWriteExt) |
+VkFlags64(AccessFlagBits2KHR::e2TransformFeedbackCounterReadExt) |
+VkFlags64(AccessFlagBits2KHR::e2TransformFeedbackCounterWriteExt) |
+VkFlags64(AccessFlagBits2KHR::e2ConditionalRenderingReadExt) |
+VkFlags64(AccessFlagBits2KHR::e2CommandPreprocessReadNv) |
+VkFlags64(AccessFlagBits2KHR::e2CommandPreprocessWriteNv) |
+VkFlags64(AccessFlagBits2KHR::e2FragmentShadingRateAttachmentRead) |
+VkFlags64(AccessFlagBits2KHR::e2AccelerationStructureRead) |
+VkFlags64(AccessFlagBits2KHR::e2AccelerationStructureWrite) |
+VkFlags64(AccessFlagBits2KHR::e2FragmentDensityMapReadExt) |
+VkFlags64(AccessFlagBits2KHR::e2ColorAttachmentReadNoncoherentExt)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator|( AccessFlagBits2KHR bit0, AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AccessFlags2KHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator&( AccessFlagBits2KHR bit0, AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AccessFlags2KHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator^( AccessFlagBits2KHR bit0, AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AccessFlags2KHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator~( AccessFlagBits2KHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( AccessFlags2KHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( AccessFlags2KHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & AccessFlagBits2KHR::e2IndirectCommandRead ) result += "2IndirectCommandRead | ";
+    if ( value & AccessFlagBits2KHR::e2IndexRead ) result += "2IndexRead | ";
+    if ( value & AccessFlagBits2KHR::e2VertexAttributeRead ) result += "2VertexAttributeRead | ";
+    if ( value & AccessFlagBits2KHR::e2UniformRead ) result += "2UniformRead | ";
+    if ( value & AccessFlagBits2KHR::e2InputAttachmentRead ) result += "2InputAttachmentRead | ";
+    if ( value & AccessFlagBits2KHR::e2ShaderRead ) result += "2ShaderRead | ";
+    if ( value & AccessFlagBits2KHR::e2ShaderWrite ) result += "2ShaderWrite | ";
+    if ( value & AccessFlagBits2KHR::e2ColorAttachmentRead ) result += "2ColorAttachmentRead | ";
+    if ( value & AccessFlagBits2KHR::e2ColorAttachmentWrite ) result += "2ColorAttachmentWrite | ";
+    if ( value & AccessFlagBits2KHR::e2DepthStencilAttachmentRead ) result += "2DepthStencilAttachmentRead | ";
+    if ( value & AccessFlagBits2KHR::e2DepthStencilAttachmentWrite ) result += "2DepthStencilAttachmentWrite | ";
+    if ( value & AccessFlagBits2KHR::e2TransferRead ) result += "2TransferRead | ";
+    if ( value & AccessFlagBits2KHR::e2TransferWrite ) result += "2TransferWrite | ";
+    if ( value & AccessFlagBits2KHR::e2HostRead ) result += "2HostRead | ";
+    if ( value & AccessFlagBits2KHR::e2HostWrite ) result += "2HostWrite | ";
+    if ( value & AccessFlagBits2KHR::e2MemoryRead ) result += "2MemoryRead | ";
+    if ( value & AccessFlagBits2KHR::e2MemoryWrite ) result += "2MemoryWrite | ";
+    if ( value & AccessFlagBits2KHR::e2ShaderSampledRead ) result += "2ShaderSampledRead | ";
+    if ( value & AccessFlagBits2KHR::e2ShaderStorageRead ) result += "2ShaderStorageRead | ";
+    if ( value & AccessFlagBits2KHR::e2ShaderStorageWrite ) result += "2ShaderStorageWrite | ";
+    if ( value & AccessFlagBits2KHR::e2TransformFeedbackWriteExt ) result += "2TransformFeedbackWriteExt | ";
+    if ( value & AccessFlagBits2KHR::e2TransformFeedbackCounterReadExt ) result += "2TransformFeedbackCounterReadExt | ";
+    if ( value & AccessFlagBits2KHR::e2TransformFeedbackCounterWriteExt ) result += "2TransformFeedbackCounterWriteExt | ";
+    if ( value & AccessFlagBits2KHR::e2ConditionalRenderingReadExt ) result += "2ConditionalRenderingReadExt | ";
+    if ( value & AccessFlagBits2KHR::e2CommandPreprocessReadNv ) result += "2CommandPreprocessReadNv | ";
+    if ( value & AccessFlagBits2KHR::e2CommandPreprocessWriteNv ) result += "2CommandPreprocessWriteNv | ";
+    if ( value & AccessFlagBits2KHR::e2FragmentShadingRateAttachmentRead ) result += "2FragmentShadingRateAttachmentRead | ";
+    if ( value & AccessFlagBits2KHR::e2AccelerationStructureRead ) result += "2AccelerationStructureRead | ";
+    if ( value & AccessFlagBits2KHR::e2AccelerationStructureWrite ) result += "2AccelerationStructureWrite | ";
+    if ( value & AccessFlagBits2KHR::e2FragmentDensityMapReadExt ) result += "2FragmentDensityMapReadExt | ";
+    if ( value & AccessFlagBits2KHR::e2ColorAttachmentReadNoncoherentExt ) result += "2ColorAttachmentReadNoncoherentExt | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
   using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR>;
 
   VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR  )
@@ -11126,20 +11484,45 @@ VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
     return "{}";
   }
 
-  enum class EventCreateFlagBits : VkFlags
-  {};
 
-  VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits )
+  using EventCreateFlags = Flags<EventCreateFlagBits>;
+
+  template <> struct FlagTraits<EventCreateFlagBits>
   {
-    return "(void)";
+    enum : VkFlags
+    {
+      allFlags = VkFlags(EventCreateFlagBits::eDeviceOnlyKHR)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator|( EventCreateFlagBits bit0, EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return EventCreateFlags( bit0 ) | bit1;
   }
 
-  using EventCreateFlags = Flags<EventCreateFlagBits>;
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator&( EventCreateFlagBits bit0, EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return EventCreateFlags( bit0 ) & bit1;
+  }
 
-  VULKAN_HPP_INLINE std::string to_string( EventCreateFlags  )
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator^( EventCreateFlagBits bit0, EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
+    return EventCreateFlags( bit0 ) ^ bit1;
+  }
 
-    return "{}";
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator~( EventCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( EventCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & EventCreateFlagBits::eDeviceOnlyKHR ) result += "DeviceOnlyKHR | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
   }
 
 
@@ -12989,7 +13372,8 @@ VkFlags(PipelineStageFlagBits::eShadingRateImageNV) |
 VkFlags(PipelineStageFlagBits::eTaskShaderNV) |
 VkFlags(PipelineStageFlagBits::eMeshShaderNV) |
 VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) |
-VkFlags(PipelineStageFlagBits::eCommandPreprocessNV)
+VkFlags(PipelineStageFlagBits::eCommandPreprocessNV) |
+VkFlags(PipelineStageFlagBits::eNoneKHR)
     };
   };
 
@@ -13048,6 +13432,112 @@ VkFlags(PipelineStageFlagBits::eCommandPreprocessNV)
     return "{ " + result.substr(0, result.size() - 3) + " }";
   }
 
+
+  using PipelineStageFlags2KHR = Flags<PipelineStageFlagBits2KHR>;
+
+  template <> struct FlagTraits<PipelineStageFlagBits2KHR>
+  {
+    enum : VkFlags64
+    {
+      allFlags = VkFlags64(PipelineStageFlagBits2KHR::e2None) |
+VkFlags64(PipelineStageFlagBits2KHR::e2TopOfPipe) |
+VkFlags64(PipelineStageFlagBits2KHR::e2DrawIndirect) |
+VkFlags64(PipelineStageFlagBits2KHR::e2VertexInput) |
+VkFlags64(PipelineStageFlagBits2KHR::e2VertexShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2TessellationControlShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2TessellationEvaluationShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2GeometryShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2FragmentShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2EarlyFragmentTests) |
+VkFlags64(PipelineStageFlagBits2KHR::e2LateFragmentTests) |
+VkFlags64(PipelineStageFlagBits2KHR::e2ColorAttachmentOutput) |
+VkFlags64(PipelineStageFlagBits2KHR::e2ComputeShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2AllTransfer) |
+VkFlags64(PipelineStageFlagBits2KHR::e2BottomOfPipe) |
+VkFlags64(PipelineStageFlagBits2KHR::e2Host) |
+VkFlags64(PipelineStageFlagBits2KHR::e2AllGraphics) |
+VkFlags64(PipelineStageFlagBits2KHR::e2AllCommands) |
+VkFlags64(PipelineStageFlagBits2KHR::e2Copy) |
+VkFlags64(PipelineStageFlagBits2KHR::e2Resolve) |
+VkFlags64(PipelineStageFlagBits2KHR::e2Blit) |
+VkFlags64(PipelineStageFlagBits2KHR::e2Clear) |
+VkFlags64(PipelineStageFlagBits2KHR::e2IndexInput) |
+VkFlags64(PipelineStageFlagBits2KHR::e2VertexAttributeInput) |
+VkFlags64(PipelineStageFlagBits2KHR::e2PreRasterizationShaders) |
+VkFlags64(PipelineStageFlagBits2KHR::e2TransformFeedbackExt) |
+VkFlags64(PipelineStageFlagBits2KHR::e2ConditionalRenderingExt) |
+VkFlags64(PipelineStageFlagBits2KHR::e2CommandPreprocessNv) |
+VkFlags64(PipelineStageFlagBits2KHR::e2FragmentShadingRateAttachment) |
+VkFlags64(PipelineStageFlagBits2KHR::e2AccelerationStructureBuild) |
+VkFlags64(PipelineStageFlagBits2KHR::e2RayTracingShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2FragmentDensityProcessExt) |
+VkFlags64(PipelineStageFlagBits2KHR::e2TaskShaderNv) |
+VkFlags64(PipelineStageFlagBits2KHR::e2MeshShaderNv)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator|( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags2KHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator&( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags2KHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator^( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags2KHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator~( PipelineStageFlagBits2KHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineStageFlags2KHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2KHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PipelineStageFlagBits2KHR::e2TopOfPipe ) result += "2TopOfPipe | ";
+    if ( value & PipelineStageFlagBits2KHR::e2DrawIndirect ) result += "2DrawIndirect | ";
+    if ( value & PipelineStageFlagBits2KHR::e2VertexInput ) result += "2VertexInput | ";
+    if ( value & PipelineStageFlagBits2KHR::e2VertexShader ) result += "2VertexShader | ";
+    if ( value & PipelineStageFlagBits2KHR::e2TessellationControlShader ) result += "2TessellationControlShader | ";
+    if ( value & PipelineStageFlagBits2KHR::e2TessellationEvaluationShader ) result += "2TessellationEvaluationShader | ";
+    if ( value & PipelineStageFlagBits2KHR::e2GeometryShader ) result += "2GeometryShader | ";
+    if ( value & PipelineStageFlagBits2KHR::e2FragmentShader ) result += "2FragmentShader | ";
+    if ( value & PipelineStageFlagBits2KHR::e2EarlyFragmentTests ) result += "2EarlyFragmentTests | ";
+    if ( value & PipelineStageFlagBits2KHR::e2LateFragmentTests ) result += "2LateFragmentTests | ";
+    if ( value & PipelineStageFlagBits2KHR::e2ColorAttachmentOutput ) result += "2ColorAttachmentOutput | ";
+    if ( value & PipelineStageFlagBits2KHR::e2ComputeShader ) result += "2ComputeShader | ";
+    if ( value & PipelineStageFlagBits2KHR::e2AllTransfer ) result += "2AllTransfer | ";
+    if ( value & PipelineStageFlagBits2KHR::e2BottomOfPipe ) result += "2BottomOfPipe | ";
+    if ( value & PipelineStageFlagBits2KHR::e2Host ) result += "2Host | ";
+    if ( value & PipelineStageFlagBits2KHR::e2AllGraphics ) result += "2AllGraphics | ";
+    if ( value & PipelineStageFlagBits2KHR::e2AllCommands ) result += "2AllCommands | ";
+    if ( value & PipelineStageFlagBits2KHR::e2Copy ) result += "2Copy | ";
+    if ( value & PipelineStageFlagBits2KHR::e2Resolve ) result += "2Resolve | ";
+    if ( value & PipelineStageFlagBits2KHR::e2Blit ) result += "2Blit | ";
+    if ( value & PipelineStageFlagBits2KHR::e2Clear ) result += "2Clear | ";
+    if ( value & PipelineStageFlagBits2KHR::e2IndexInput ) result += "2IndexInput | ";
+    if ( value & PipelineStageFlagBits2KHR::e2VertexAttributeInput ) result += "2VertexAttributeInput | ";
+    if ( value & PipelineStageFlagBits2KHR::e2PreRasterizationShaders ) result += "2PreRasterizationShaders | ";
+    if ( value & PipelineStageFlagBits2KHR::e2TransformFeedbackExt ) result += "2TransformFeedbackExt | ";
+    if ( value & PipelineStageFlagBits2KHR::e2ConditionalRenderingExt ) result += "2ConditionalRenderingExt | ";
+    if ( value & PipelineStageFlagBits2KHR::e2CommandPreprocessNv ) result += "2CommandPreprocessNv | ";
+    if ( value & PipelineStageFlagBits2KHR::e2FragmentShadingRateAttachment ) result += "2FragmentShadingRateAttachment | ";
+    if ( value & PipelineStageFlagBits2KHR::e2AccelerationStructureBuild ) result += "2AccelerationStructureBuild | ";
+    if ( value & PipelineStageFlagBits2KHR::e2RayTracingShader ) result += "2RayTracingShader | ";
+    if ( value & PipelineStageFlagBits2KHR::e2FragmentDensityProcessExt ) result += "2FragmentDensityProcessExt | ";
+    if ( value & PipelineStageFlagBits2KHR::e2TaskShaderNv ) result += "2TaskShaderNv | ";
+    if ( value & PipelineStageFlagBits2KHR::e2MeshShaderNv ) result += "2MeshShaderNv | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
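
    // A quick check of the new 64-bit flag wrappers added above: single bits
    // compose with the usual bitwise operators and vk::to_string prints the set
    // bits. Assumes the vulkan.hpp from this update plus <iostream> are included
    // and the default `vk` namespace alias is in use.
    void printSync2Flags()
    {
      vk::PipelineStageFlags2KHR stages = vk::PipelineStageFlagBits2KHR::e2VertexShader
                                        | vk::PipelineStageFlagBits2KHR::e2FragmentShader;
      vk::AccessFlags2KHR access = vk::AccessFlagBits2KHR::e2UniformRead
                                 | vk::AccessFlagBits2KHR::e2ShaderSampledRead;

      std::cout << vk::to_string( stages )   // "{ 2VertexShader | 2FragmentShader }"
                << vk::to_string( access );  // "{ 2UniformRead | 2ShaderSampledRead }"
    }
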
   enum class PipelineTessellationStateCreateFlagBits : VkFlags
   {};
 
@@ -13910,6 +14400,47 @@ VkFlags(SubgroupFeatureFlagBits::ePartitionedNV)
   }
 
 
+  using SubmitFlagsKHR = Flags<SubmitFlagBitsKHR>;
+
+  template <> struct FlagTraits<SubmitFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SubmitFlagBitsKHR::eProtected)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator|( SubmitFlagBitsKHR bit0, SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubmitFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator&( SubmitFlagBitsKHR bit0, SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubmitFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator^( SubmitFlagBitsKHR bit0, SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubmitFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator~( SubmitFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SubmitFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SubmitFlagsKHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SubmitFlagBitsKHR::eProtected ) result += "Protected | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
   using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits>;
 
   template <> struct FlagTraits<SubpassDescriptionFlagBits>
@@ -23004,6 +23535,152 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = BufferMemoryBarrier;
   };
 
+  struct BufferMemoryBarrier2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), buffer( buffer_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2KHR( BufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferMemoryBarrier2KHR( VkBufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferMemoryBarrier2KHR( *reinterpret_cast<BufferMemoryBarrier2KHR const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2KHR & operator=( BufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferMemoryBarrier2KHR & operator=( VkBufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR const *>( &rhs );
+      return *this;
+    }
+
+    BufferMemoryBarrier2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferMemoryBarrier2KHR & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    BufferMemoryBarrier2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    BufferMemoryBarrier2KHR & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    BufferMemoryBarrier2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    BufferMemoryBarrier2KHR & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    BufferMemoryBarrier2KHR & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    BufferMemoryBarrier2KHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BufferMemoryBarrier2KHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    BufferMemoryBarrier2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+
+    operator VkBufferMemoryBarrier2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferMemoryBarrier2KHR*>( this );
+    }
+
+    operator VkBufferMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferMemoryBarrier2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferMemoryBarrier2KHR const& ) const = default;
+#else
+    bool operator==( BufferMemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( BufferMemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {};
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+  static_assert( sizeof( BufferMemoryBarrier2KHR ) == sizeof( VkBufferMemoryBarrier2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferMemoryBarrier2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferMemoryBarrier2KHR>
+  {
+    using Type = BufferMemoryBarrier2KHR;
+  };
+
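
    // The same kind of barrier as in the C sketch near the top of this page,
    // built with the wrapper just added and its fluent setters; `buf` is assumed
    // to be a valid vk::Buffer created elsewhere.
    vk::BufferMemoryBarrier2KHR makeTransferToFragmentBarrier( vk::Buffer buf )
    {
      return vk::BufferMemoryBarrier2KHR{}
        .setSrcStageMask( vk::PipelineStageFlagBits2KHR::e2Copy )
        .setSrcAccessMask( vk::AccessFlagBits2KHR::e2TransferWrite )
        .setDstStageMask( vk::PipelineStageFlagBits2KHR::e2FragmentShader )
        .setDstAccessMask( vk::AccessFlagBits2KHR::e2ShaderRead )
        .setBuffer( buf )
        .setOffset( 0 )
        .setSize( VK_WHOLE_SIZE );
    }
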
   struct BufferMemoryRequirementsInfo2
   {
     static const bool allowDuplicate = false;
@@ -23366,6 +24043,78 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = CalibratedTimestampInfoEXT;
   };
 
+  struct CheckpointData2NV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2Nv;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CheckpointData2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage_ = {}, void* pCheckpointMarker_ = {}) VULKAN_HPP_NOEXCEPT
+    : stage( stage_ ), pCheckpointMarker( pCheckpointMarker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointData2NV const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkCheckpointData2NV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCheckpointData2NV*>( this );
+    }
+
+    operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCheckpointData2NV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CheckpointData2NV const& ) const = default;
+#else
+    bool operator==( CheckpointData2NV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stage == rhs.stage )
+          && ( pCheckpointMarker == rhs.pCheckpointMarker );
+    }
+
+    bool operator!=( CheckpointData2NV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2Nv;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage = {};
+    void* pCheckpointMarker = {};
+
+  };
+  static_assert( sizeof( CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CheckpointData2NV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCheckpointData2Nv>
+  {
+    using Type = CheckpointData2NV;
+  };
+
   struct CheckpointDataNV
   {
     static const bool allowDuplicate = false;
@@ -24734,73 +25483,80 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
   };
 
-  struct CommandPoolCreateInfo
+  struct ConditionalRenderingBeginInfoEXT
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ )
+    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), offset( offset_ ), flags( flags_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
+    ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
       return *this;
     }
 
-    CommandPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      buffer = buffer_;
       return *this;
     }
 
-    CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
     {
-      queueFamilyIndex = queueFamilyIndex_;
+      offset = offset_;
       return *this;
     }
 
+    ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
 
-    operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+    operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
+      return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
     }
 
-    operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
+      return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( CommandPoolCreateInfo const& ) const = default;
+    auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default;
 #else
-    bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( queueFamilyIndex == rhs.queueFamilyIndex );
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( flags == rhs.flags );
     }
 
-    bool operator!=( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -24809,300 +25565,315 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
-    uint32_t queueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
 
   };
-  static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
+  struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
   {
-    using Type = CommandPoolCreateInfo;
+    using Type = ConditionalRenderingBeginInfoEXT;
   };
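  // Illustrative usage sketch (not part of the generated header): the chained setters above
  // fill in vk::ConditionalRenderingBeginInfoEXT. `predicateBuffer` is assumed to be a valid
  // vk::Buffer holding a 32-bit predicate, and VK_EXT_conditional_rendering must be enabled.
  void beginConditionalRendering( vk::CommandBuffer commandBuffer, vk::Buffer predicateBuffer )
  {
    vk::ConditionalRenderingBeginInfoEXT conditionalRenderingBegin =
      vk::ConditionalRenderingBeginInfoEXT{}
        .setBuffer( predicateBuffer )
        .setOffset( 0 )
        .setFlags( vk::ConditionalRenderingFlagBitsEXT::eInverted );
    commandBuffer.beginConditionalRenderingEXT( conditionalRenderingBegin );
  }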
 
-  class ShaderModule
+  struct DebugUtilsLabelEXT
   {
-  public:
-    using CType = VkShaderModule;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
 
-  public:
-    VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
-      : m_shaderModule(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char* pLabelName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
+    : pLabelName( pLabelName_ ), color( color_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_shaderModule(VK_NULL_HANDLE)
-    {}
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
-      : m_shaderModule( shaderModule )
+    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) )
     {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      m_shaderModule = shaderModule;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
       return *this;
     }
-#endif
 
-    ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsLabelEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      m_shaderModule = VK_NULL_HANDLE;
+      pNext = pNext_;
       return *this;
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ShaderModule const& ) const = default;
-#else
-    bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_shaderModule == rhs.m_shaderModule;
+      pLabelName = pLabelName_;
+      return *this;
     }
 
-    bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_shaderModule != rhs.m_shaderModule;
+      color = color_;
+      return *this;
     }
 
-    bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+
+    operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return m_shaderModule < rhs.m_shaderModule;
+      return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
     }
-#endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
+    operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return m_shaderModule;
+      return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
     }
 
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsLabelEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_shaderModule != VK_NULL_HANDLE;
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pLabelName == rhs.pLabelName )
+          && ( color == rhs.color );
     }
 
-    bool operator!() const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_shaderModule == VK_NULL_HANDLE;
+      return !operator==( rhs );
     }
+#endif
 
-  private:
-    VkShaderModule m_shaderModule;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
-
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eShaderModule>
-  {
-    using type = VULKAN_HPP_NAMESPACE::ShaderModule;
-  };
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
-  };
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
+    const void* pNext = {};
+    const char* pLabelName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
   };
-
+  static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderModule>
+  struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
   {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+    using Type = DebugUtilsLabelEXT;
   };
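  // Illustrative sketch (assumes VK_EXT_debug_utils is enabled; label text and colour are
  // hypothetical): setColor() accepts a std::array<float, 4>, stored as ArrayWrapper1D.
  void labelShadowPass( vk::CommandBuffer commandBuffer )
  {
    vk::DebugUtilsLabelEXT label = vk::DebugUtilsLabelEXT{}
                                     .setPLabelName( "shadow pass" )
                                     .setColor( { { 1.0f, 0.0f, 0.0f, 1.0f } } );
    commandBuffer.beginDebugUtilsLabelEXT( label );
    // ... record the labelled work here ...
    commandBuffer.endDebugUtilsLabelEXT();
  }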
 
-  struct SpecializationMapEntry
+  class QueryPool
   {
+  public:
+    using CType = VkQueryPool;
 
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT
-    : constantID( constantID_ ), offset( offset_ ), size( size_ )
+  public:
+    VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT
+      : m_queryPool(VK_NULL_HANDLE)
     {}
 
-    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
-      : SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
+    VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_queryPool(VK_NULL_HANDLE)
     {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
+      : m_queryPool( queryPool )
+    {}
 
-    SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
+      m_queryPool = queryPool;
       return *this;
     }
+#endif
 
-    SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
+    QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      constantID = constantID_;
+      m_queryPool = VK_NULL_HANDLE;
       return *this;
     }
 
-    SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueryPool const& ) const = default;
+#else
+    bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      offset = offset_;
-      return *this;
+      return m_queryPool == rhs.m_queryPool;
     }
 
-    SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
+    bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      size = size_;
-      return *this;
+      return m_queryPool != rhs.m_queryPool;
     }
 
-
-    operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT
+    bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
+      return m_queryPool < rhs.m_queryPool;
     }
+#endif
 
-    operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSpecializationMapEntry*>( this );
+      return m_queryPool;
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SpecializationMapEntry const& ) const = default;
-#else
-    bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return ( constantID == rhs.constantID )
-          && ( offset == rhs.offset )
-          && ( size == rhs.size );
+      return m_queryPool != VK_NULL_HANDLE;
     }
 
-    bool operator!=( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return m_queryPool == VK_NULL_HANDLE;
     }
-#endif
 
+  private:
+    VkQueryPool m_queryPool;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueryPool>
+  {
+    using type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
 
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
 
-  public:
-    uint32_t constantID = {};
-    uint32_t offset = {};
-    size_t size = {};
 
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
   };
-  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
 
-  struct SpecializationInfo
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
   {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
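  // Illustrative sketch: handle wrappers such as vk::QueryPool (and vk::DescriptorSet,
  // vk::Pipeline and vk::ImageView below) default to VK_NULL_HANDLE and are testable in a
  // boolean context; `device` and `queryPoolCreateInfo` here are hypothetical.
  vk::QueryPool ensureQueryPool( vk::Device device, vk::QueryPoolCreateInfo const & queryPoolCreateInfo, vk::QueryPool queryPool )
  {
    if ( !queryPool )                                              // operator! shown above: compares against VK_NULL_HANDLE
    {
      queryPool = device.createQueryPool( queryPoolCreateInfo );   // returns the handle directly when exceptions are enabled
    }
    return queryPool;
  }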
 
+  struct RenderPassBeginInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, size_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT
-    : mapEntryCount( mapEntryCount_ ), pMapEntries( pMapEntries_ ), dataSize( dataSize_ ), pData( pData_ )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {}) VULKAN_HPP_NOEXCEPT
+    : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( clearValueCount_ ), pClearValues( pClearValues_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) )
+    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
     {}
 
 #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    template <typename T>
-    SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
-    : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+    RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
+    : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast<uint32_t>( clearValues_.size() ) ), pClearValues( clearValues_.data() )
     {}
 #endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
       return *this;
     }
 
-    SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      mapEntryCount = mapEntryCount_;
+      pNext = pNext_;
       return *this;
     }
 
-    SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
     {
-      pMapEntries = pMapEntries_;
+      renderPass = renderPass_;
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
     {
-      mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
-      pMapEntries = mapEntries_.data();
+      framebuffer = framebuffer_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
     {
-      dataSize = dataSize_;
+      renderArea = renderArea_;
       return *this;
     }
 
-    SpecializationInfo & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      pData = pData_;
+      clearValueCount = clearValueCount_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pClearValues = pClearValues_;
       return *this;
     }
 
 #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    template <typename T>
-    SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
     {
-      dataSize = data_.size() * sizeof(T);
-      pData = data_.data();
+      clearValueCount = static_cast<uint32_t>( clearValues_.size() );
+      pClearValues = clearValues_.data();
       return *this;
     }
 #endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSpecializationInfo*>( this );
+      return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
     }
 
-    operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
+    operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSpecializationInfo*>( this );
+      return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SpecializationInfo const& ) const = default;
+    auto operator<=>( RenderPassBeginInfo const& ) const = default;
 #else
-    bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( mapEntryCount == rhs.mapEntryCount )
-          && ( pMapEntries == rhs.pMapEntries )
-          && ( dataSize == rhs.dataSize )
-          && ( pData == rhs.pData );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( renderArea == rhs.renderArea )
+          && ( clearValueCount == rhs.clearValueCount )
+          && ( pClearValues == rhs.pClearValues );
     }
 
-    bool operator!=( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -25111,103 +25882,84 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    uint32_t mapEntryCount = {};
-    const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {};
-    size_t dataSize = {};
-    const void* pData = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+    uint32_t clearValueCount = {};
+    const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {};
 
   };
-  static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
 
-  struct PipelineShaderStageCreateInfo
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
+  {
+    using Type = RenderPassBeginInfo;
+  };
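  // Illustrative sketch (enhanced mode): setClearValues() takes an ArrayProxy and fills both
  // clearValueCount and pClearValues in one call; renderPass, framebuffer and extent are
  // hypothetical, previously created objects.
  vk::RenderPassBeginInfo makeRenderPassBeginInfo( vk::RenderPass                        renderPass,
                                                   vk::Framebuffer                       framebuffer,
                                                   vk::Extent2D                          extent,
                                                   std::array<vk::ClearValue, 2> const & clearValues )
  {
    return vk::RenderPassBeginInfo{}
      .setRenderPass( renderPass )
      .setFramebuffer( framebuffer )
      .setRenderArea( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
      .setClearValues( clearValues );
  }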
+
+  struct SubpassBeginInfo
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char* pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), stage( stage_ ), module( module_ ), pName( pName_ ), pSpecializationInfo( pSpecializationInfo_ )
+    VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline) VULKAN_HPP_NOEXCEPT
+    : contents( contents_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
+    SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
       return *this;
     }
 
-    PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
-    {
-      stage = stage_;
-      return *this;
-    }
-
-    PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
-    {
-      module = module_;
-      return *this;
-    }
-
-    PipelineShaderStageCreateInfo & setPName( const char* pName_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pName = pName_;
-      return *this;
-    }
-
-    PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
+    SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
     {
-      pSpecializationInfo = pSpecializationInfo_;
+      contents = contents_;
       return *this;
     }
 
 
-    operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
+      return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
     }
 
-    operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
+      return *reinterpret_cast<VkSubpassBeginInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default;
+    auto operator<=>( SubpassBeginInfo const& ) const = default;
 #else
-    bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( stage == rhs.stage )
-          && ( module == rhs.module )
-          && ( pName == rhs.pName )
-          && ( pSpecializationInfo == rhs.pSpecializationInfo );
+          && ( contents == rhs.contents );
     }
 
-    bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -25216,23 +25968,20 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
-    VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
-    VULKAN_HPP_NAMESPACE::ShaderModule module = {};
-    const char* pName = {};
-    const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {};
+    VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
 
   };
-  static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
+  struct CppType<StructureType, StructureType::eSubpassBeginInfo>
   {
-    using Type = PipelineShaderStageCreateInfo;
+    using Type = SubpassBeginInfo;
   };
+  using SubpassBeginInfoKHR = SubpassBeginInfo;
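  // Illustrative sketch: SubpassBeginInfo carries only the SubpassContents value (eInline by
  // default) and pairs with RenderPassBeginInfo in beginRenderPass2; the SubpassBeginInfoKHR
  // alias above keeps code written against VK_KHR_create_renderpass2 compiling unchanged.
  void beginFirstSubpass( vk::CommandBuffer commandBuffer, vk::RenderPassBeginInfo const & renderPassBegin )
  {
    vk::SubpassBeginInfo subpassBegin( vk::SubpassContents::eInline );
    commandBuffer.beginRenderPass2( renderPassBegin, subpassBegin );
  }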
 
   class PipelineLayout
   {
@@ -25334,390 +26083,378 @@ namespace VULKAN_HPP_NAMESPACE
     static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
 
-  class Pipeline
+  class DescriptorSet
   {
   public:
-    using CType = VkPipeline;
+    using CType = VkDescriptorSet;
 
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
 
   public:
-    VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT
-      : m_pipeline(VK_NULL_HANDLE)
+    VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet(VK_NULL_HANDLE)
     {}
 
-    VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_pipeline(VK_NULL_HANDLE)
+    VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet(VK_NULL_HANDLE)
     {}
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
-      : m_pipeline( pipeline )
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet( descriptorSet )
     {}
 
 #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT
+    DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
     {
-      m_pipeline = pipeline;
+      m_descriptorSet = descriptorSet;
       return *this;
     }
 #endif
 
-    Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      m_pipeline = VK_NULL_HANDLE;
+      m_descriptorSet = VK_NULL_HANDLE;
       return *this;
     }
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Pipeline const& ) const = default;
+    auto operator<=>( DescriptorSet const& ) const = default;
 #else
-    bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_pipeline == rhs.m_pipeline;
+      return m_descriptorSet == rhs.m_descriptorSet;
     }
 
-    bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_pipeline != rhs.m_pipeline;
+      return m_descriptorSet != rhs.m_descriptorSet;
     }
 
-    bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_pipeline < rhs.m_pipeline;
+      return m_descriptorSet < rhs.m_descriptorSet;
     }
 #endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
     {
-      return m_pipeline;
+      return m_descriptorSet;
     }
 
     explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return m_pipeline != VK_NULL_HANDLE;
+      return m_descriptorSet != VK_NULL_HANDLE;
     }
 
     bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return m_pipeline == VK_NULL_HANDLE;
+      return m_descriptorSet == VK_NULL_HANDLE;
     }
 
   private:
-    VkPipeline m_pipeline;
+    VkDescriptorSet m_descriptorSet;
   };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
 
   template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipeline>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSet>
   {
-    using type = VULKAN_HPP_NAMESPACE::Pipeline;
+    using type = VULKAN_HPP_NAMESPACE::DescriptorSet;
   };
 
   template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipeline>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet>
   {
-    using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
   };
 
 
   template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
   {
-    using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
   };
 
 
   template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Pipeline>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSet>
   {
     static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
 
-  struct ComputePipelineCreateInfo
+  class Pipeline
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), stage( stage_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
-    {}
+  public:
+    using CType = VkPipeline;
 
-    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
 
-    ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
+  public:
+    VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT
+      : m_pipeline(VK_NULL_HANDLE)
     {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
-    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_pipeline(VK_NULL_HANDLE)
+    {}
 
-    ComputePipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
+    VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
+      : m_pipeline( pipeline )
+    {}
 
-    ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      m_pipeline = pipeline;
       return *this;
     }
+#endif
 
-    ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
+    Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      stage = stage_;
+      m_pipeline = VK_NULL_HANDLE;
       return *this;
     }
 
-    ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Pipeline const& ) const = default;
+#else
+    bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      layout = layout_;
-      return *this;
+      return m_pipeline == rhs.m_pipeline;
     }
 
-    ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      basePipelineHandle = basePipelineHandle_;
-      return *this;
+      return m_pipeline != rhs.m_pipeline;
     }
 
-    ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      basePipelineIndex = basePipelineIndex_;
-      return *this;
+      return m_pipeline < rhs.m_pipeline;
     }
+#endif
 
-
-    operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
+      return m_pipeline;
     }
 
-    operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
+      return m_pipeline != VK_NULL_HANDLE;
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ComputePipelineCreateInfo const& ) const = default;
-#else
-    bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( stage == rhs.stage )
-          && ( layout == rhs.layout )
-          && ( basePipelineHandle == rhs.basePipelineHandle )
-          && ( basePipelineIndex == rhs.basePipelineIndex );
+      return m_pipeline == VK_NULL_HANDLE;
     }
 
-    bool operator!=( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
+  private:
+    VkPipeline m_pipeline;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
 
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipeline>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Pipeline;
+  };
 
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipeline>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+  };
 
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
-    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
-    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
-    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
-    int32_t basePipelineIndex = {};
 
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Pipeline;
   };
-  static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
 
   template <>
-  struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Pipeline>
   {
-    using Type = ComputePipelineCreateInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
 
-  struct ConditionalRenderingBeginInfoEXT
+  class ImageView
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
+  public:
+    using CType = VkImageView;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
-    : buffer( buffer_ ), offset( offset_ ), flags( flags_ )
-    {}
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
 
-    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+  public:
+    VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT
+      : m_imageView(VK_NULL_HANDLE)
+    {}
 
-    ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
+    VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_imageView(VK_NULL_HANDLE)
     {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
+      : m_imageView( imageView )
+    {}
 
-    ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
+      m_imageView = imageView;
       return *this;
     }
+#endif
 
-    ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      m_imageView = VK_NULL_HANDLE;
       return *this;
     }
 
-    ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageView const& ) const = default;
+#else
+    bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      buffer = buffer_;
-      return *this;
+      return m_imageView == rhs.m_imageView;
     }
 
-    ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      offset = offset_;
-      return *this;
+      return m_imageView != rhs.m_imageView;
     }
 
-    ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
-      return *this;
+      return m_imageView < rhs.m_imageView;
     }
+#endif
 
-
-    operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
+      return m_imageView;
     }
 
-    operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
+      return m_imageView != VK_NULL_HANDLE;
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default;
-#else
-    bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( buffer == rhs.buffer )
-          && ( offset == rhs.offset )
-          && ( flags == rhs.flags );
+      return m_imageView == VK_NULL_HANDLE;
     }
 
-    bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
+  private:
+    VkImageView m_imageView;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
 
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eImageView>
+  {
+    using type = VULKAN_HPP_NAMESPACE::ImageView;
+  };
 
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImageView>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ImageView;
+  };
 
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
-    VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
 
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ImageView;
   };
-  static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
 
   template <>
-  struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ImageView>
   {
-    using Type = ConditionalRenderingBeginInfoEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
 
-  struct ConformanceVersion
+  struct ImageBlit
   {
 
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT
-    : major( major_ ), minor( minor_ ), subminor( subminor_ ), patch( patch_ )
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) )
+    ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+    ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
       return *this;
     }
 
-    ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
+    ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
     {
-      major = major_;
+      srcSubresource = srcSubresource_;
       return *this;
     }
 
-    ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
+    ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
     {
-      minor = minor_;
+      srcOffsets = srcOffsets_;
       return *this;
     }
 
-    ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
+    ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
     {
-      subminor = subminor_;
+      dstSubresource = dstSubresource_;
       return *this;
     }
 
-    ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
+    ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
     {
-      patch = patch_;
+      dstOffsets = dstOffsets_;
       return *this;
     }
 
 
-    operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT
+    operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkConformanceVersion*>( this );
+      return *reinterpret_cast<const VkImageBlit*>( this );
     }
 
-    operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
+    operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkConformanceVersion*>( this );
+      return *reinterpret_cast<VkImageBlit*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ConformanceVersion const& ) const = default;
+    auto operator<=>( ImageBlit const& ) const = default;
 #else
-    bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( major == rhs.major )
-          && ( minor == rhs.minor )
-          && ( subminor == rhs.subminor )
-          && ( patch == rhs.patch );
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffsets == rhs.srcOffsets )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffsets == rhs.dstOffsets );
     }
 
-    bool operator!=( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -25726,125 +26463,94 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    uint8_t major = {};
-    uint8_t minor = {};
-    uint8_t subminor = {};
-    uint8_t patch = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
 
   };
-  static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
-  using ConformanceVersionKHR = ConformanceVersion;
+  static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
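  // Illustrative sketch: the srcOffsets / dstOffsets members are ArrayWrapper1D<Offset3D, 2>
  // and can be set from a std::array; mip level 0, a single colour layer and the given extents
  // are hypothetical choices for a full-image blit.
  vk::ImageBlit makeFullImageBlit( int32_t srcWidth, int32_t srcHeight, int32_t dstWidth, int32_t dstHeight )
  {
    vk::ImageSubresourceLayers colorLayer( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
    return vk::ImageBlit{}
      .setSrcSubresource( colorLayer )
      .setSrcOffsets( { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( srcWidth, srcHeight, 1 ) } } )
      .setDstSubresource( colorLayer )
      .setDstOffsets( { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( dstWidth, dstHeight, 1 ) } } );
  }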
 
-  struct CooperativeMatrixPropertiesNV
+  struct ImageSubresourceRange
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
+
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice) VULKAN_HPP_NOEXCEPT
-    : MSize( MSize_ ), NSize( NSize_ ), KSize( KSize_ ), AType( AType_ ), BType( BType_ ), CType( CType_ ), DType( DType_ ), scope( scope_ )
+    VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), baseMipLevel( baseMipLevel_ ), levelCount( levelCount_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-      : CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
+    ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
-      return *this;
-    }
-
-    CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
-    {
-      MSize = MSize_;
-      return *this;
-    }
-
-    CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
-    {
-      NSize = NSize_;
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
+    ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      KSize = KSize_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
       return *this;
     }
 
-    CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
+    ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      AType = AType_;
+      aspectMask = aspectMask_;
       return *this;
     }
 
-    CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
+    ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
     {
-      BType = BType_;
+      baseMipLevel = baseMipLevel_;
       return *this;
     }
 
-    CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
+    ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      CType = CType_;
+      levelCount = levelCount_;
       return *this;
     }
 
-    CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
+    ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
     {
-      DType = DType_;
+      baseArrayLayer = baseArrayLayer_;
       return *this;
     }
 
-    CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
+    ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      scope = scope_;
+      layerCount = layerCount_;
       return *this;
     }
 
 
-    operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
+      return *reinterpret_cast<const VkImageSubresourceRange*>( this );
     }
 
-    operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
+      return *reinterpret_cast<VkImageSubresourceRange*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = default;
+    auto operator<=>( ImageSubresourceRange const& ) const = default;
 #else
-    bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( MSize == rhs.MSize )
-          && ( NSize == rhs.NSize )
-          && ( KSize == rhs.KSize )
-          && ( AType == rhs.AType )
-          && ( BType == rhs.BType )
-          && ( CType == rhs.CType )
-          && ( DType == rhs.DType )
-          && ( scope == rhs.scope );
+      return ( aspectMask == rhs.aspectMask )
+          && ( baseMipLevel == rhs.baseMipLevel )
+          && ( levelCount == rhs.levelCount )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
     }
 
-    bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -25853,26 +26559,15 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
-    void* pNext = {};
-    uint32_t MSize = {};
-    uint32_t NSize = {};
-    uint32_t KSize = {};
-    VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
-    VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
-    VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
-    VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
-    VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
-
-  };
-  static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t baseMipLevel = {};
+    uint32_t levelCount = {};
+    uint32_t baseArrayLayer = {};
+    uint32_t layerCount = {};
 
-  template <>
-  struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
-  {
-    using Type = CooperativeMatrixPropertiesNV;
   };
+  static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
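
A minimal usage sketch for the ImageSubresourceRange wrapper above, assuming an application that includes <vulkan/vulkan.hpp> and uses the default vk namespace; only the range itself is shown, everything else is assumed to exist elsewhere:

    // Describe every mip level and array layer of a color image,
    // e.g. for a whole-image layout transition or an image view.
    vk::ImageSubresourceRange fullRange = vk::ImageSubresourceRange()
                                              .setAspectMask( vk::ImageAspectFlagBits::eColor )
                                              .setBaseMipLevel( 0 )
                                              .setLevelCount( VK_REMAINING_MIP_LEVELS )
                                              .setBaseArrayLayer( 0 )
                                              .setLayerCount( VK_REMAINING_ARRAY_LAYERS );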
 
   struct CopyAccelerationStructureInfoKHR
   {
@@ -26302,290 +26997,85 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = CopyBufferToImageInfo2KHR;
   };
 
-  struct CopyCommandTransformInfoQCOM
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT
-    : transform( transform_ )
-    {}
-
-    VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
-      : CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
-    {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
-    VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const *>( &rhs );
-      return *this;
-    }
-
-    CopyCommandTransformInfoQCOM & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
-    {
-      transform = transform_;
-      return *this;
-    }
-
-
-    operator VkCopyCommandTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM*>( this );
-    }
-
-    operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkCopyCommandTransformInfoQCOM*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( CopyCommandTransformInfoQCOM const& ) const = default;
-#else
-    bool operator==( CopyCommandTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( transform == rhs.transform );
-    }
-
-    bool operator!=( CopyCommandTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
-
-  };
-  static_assert( sizeof( CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<CopyCommandTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
-  {
-    using Type = CopyCommandTransformInfoQCOM;
-  };
-
-  class DescriptorSet
-  {
-  public:
-    using CType = VkDescriptorSet;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
-
-  public:
-    VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT
-      : m_descriptorSet(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_descriptorSet(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
-      : m_descriptorSet( descriptorSet )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
-    {
-      m_descriptorSet = descriptorSet;
-      return *this;
-    }
-#endif
-
-    DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-    {
-      m_descriptorSet = VK_NULL_HANDLE;
-      return *this;
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorSet const& ) const = default;
-#else
-    bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_descriptorSet == rhs.m_descriptorSet;
-    }
-
-    bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_descriptorSet != rhs.m_descriptorSet;
-    }
-
-    bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_descriptorSet < rhs.m_descriptorSet;
-    }
-#endif
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_descriptorSet;
-    }
-
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_descriptorSet != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_descriptorSet == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDescriptorSet m_descriptorSet;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
-
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSet>
-  {
-    using type = VULKAN_HPP_NAMESPACE::DescriptorSet;
-  };
-
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
-  };
-
-
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
-  };
-
-
-  template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSet>
+  struct ImageCopy
   {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
-  };
 
-  struct CopyDescriptorSet
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
-    : srcSet( srcSet_ ), srcBinding( srcBinding_ ), srcArrayElement( srcArrayElement_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ )
+    VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
-      : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
+    ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
-      return *this;
-    }
-
-    CopyDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
-    {
-      srcSet = srcSet_;
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
+    ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      srcBinding = srcBinding_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
       return *this;
     }
 
-    CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
     {
-      srcArrayElement = srcArrayElement_;
+      srcSubresource = srcSubresource_;
       return *this;
     }
 
-    CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstSet = dstSet_;
+      srcOffset = srcOffset_;
       return *this;
     }
 
-    CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstBinding = dstBinding_;
+      dstSubresource = dstSubresource_;
       return *this;
     }
 
-    CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstArrayElement = dstArrayElement_;
+      dstOffset = dstOffset_;
       return *this;
     }
 
-    CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
     {
-      descriptorCount = descriptorCount_;
+      extent = extent_;
       return *this;
     }
 
 
-    operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+    operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
+      return *reinterpret_cast<const VkImageCopy*>( this );
     }
 
-    operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkCopyDescriptorSet*>( this );
+      return *reinterpret_cast<VkImageCopy*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( CopyDescriptorSet const& ) const = default;
+    auto operator<=>( ImageCopy const& ) const = default;
 #else
-    bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( srcSet == rhs.srcSet )
-          && ( srcBinding == rhs.srcBinding )
-          && ( srcArrayElement == rhs.srcArrayElement )
-          && ( dstSet == rhs.dstSet )
-          && ( dstBinding == rhs.dstBinding )
-          && ( dstArrayElement == rhs.dstArrayElement )
-          && ( descriptorCount == rhs.descriptorCount );
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
     }
 
-    bool operator!=( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -26594,25 +27084,15 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
-    uint32_t srcBinding = {};
-    uint32_t srcArrayElement = {};
-    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
-    uint32_t dstBinding = {};
-    uint32_t dstArrayElement = {};
-    uint32_t descriptorCount = {};
-
-  };
-  static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
 
-  template <>
-  struct CppType<StructureType, StructureType::eCopyDescriptorSet>
-  {
-    using Type = CopyDescriptorSet;
   };
+  static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
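
A minimal sketch using the ImageCopy wrapper above together with the enhanced-mode vk::CommandBuffer::copyImage overload; cmdBuffer, srcImage and dstImage are placeholders for valid handles created elsewhere, and both images are assumed to be 2D with compatible formats:

    // Copy one full 256x256 mip level between two images.
    vk::ImageSubresourceLayers layers( vk::ImageAspectFlagBits::eColor, 0 /*mipLevel*/, 0 /*baseArrayLayer*/, 1 /*layerCount*/ );
    vk::ImageCopy              region( layers, vk::Offset3D( 0, 0, 0 ), layers, vk::Offset3D( 0, 0, 0 ), vk::Extent3D( 256, 256, 1 ) );
    cmdBuffer.copyImage( srcImage, vk::ImageLayout::eTransferSrcOptimal,
                         dstImage, vk::ImageLayout::eTransferDstOptimal,
                         region );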
 
   struct ImageCopy2KHR
   {
@@ -27074,138 +27554,6 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = CopyMemoryToAccelerationStructureInfoKHR;
   };
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct D3D12FenceSubmitInfoKHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
-    : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
-    {}
-
-    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-      : D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
-    {}
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
-    : waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
-    {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
-    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
-      return *this;
-    }
-
-    D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
-      return *this;
-    }
-
-    D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pWaitSemaphoreValues = pWaitSemaphoreValues_;
-      return *this;
-    }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
-    {
-      waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
-      pWaitSemaphoreValues = waitSemaphoreValues_.data();
-      return *this;
-    }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
-    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
-      return *this;
-    }
-
-    D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pSignalSemaphoreValues = pSignalSemaphoreValues_;
-      return *this;
-    }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
-    {
-      signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
-      pSignalSemaphoreValues = signalSemaphoreValues_.data();
-      return *this;
-    }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
-
-    operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
-    }
-
-    operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default;
-#else
-    bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
-          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
-          && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
-          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
-    }
-
-    bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
-    const void* pNext = {};
-    uint32_t waitSemaphoreValuesCount = {};
-    const uint64_t* pWaitSemaphoreValues = {};
-    uint32_t signalSemaphoreValuesCount = {};
-    const uint64_t* pSignalSemaphoreValues = {};
-
-  };
-  static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
-  {
-    using Type = D3D12FenceSubmitInfoKHR;
-  };
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
   struct DebugMarkerMarkerInfoEXT
   {
     static const bool allowDuplicate = false;
@@ -27296,80 +27644,59 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = DebugMarkerMarkerInfoEXT;
   };
 
-  struct DebugMarkerObjectNameInfoEXT
+  struct SubpassEndInfo
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
-    : objectType( objectType_ ), object( object_ ), pObjectName( pObjectName_ )
+    VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT
+    
     {}
 
-    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
+    SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
       return *this;
     }
 
-    DebugMarkerObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
-    {
-      objectType = objectType_;
-      return *this;
-    }
-
-    DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
-    {
-      object = object_;
-      return *this;
-    }
-
-    DebugMarkerObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pObjectName = pObjectName_;
-      return *this;
-    }
-
 
-    operator VkDebugMarkerObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
+      return *reinterpret_cast<const VkSubpassEndInfo*>( this );
     }
 
-    operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
+      return *reinterpret_cast<VkSubpassEndInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DebugMarkerObjectNameInfoEXT const& ) const = default;
+    auto operator<=>( SubpassEndInfo const& ) const = default;
 #else
-    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( object == rhs.object )
-          && ( pObjectName == rhs.pObjectName );
+          && ( pNext == rhs.pNext );
     }
 
-    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -27378,127 +27705,172 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
-    uint64_t object = {};
-    const char* pObjectName = {};
 
   };
-  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
+  struct CppType<StructureType, StructureType::eSubpassEndInfo>
   {
-    using Type = DebugMarkerObjectNameInfoEXT;
+    using Type = SubpassEndInfo;
   };
+  using SubpassEndInfoKHR = SubpassEndInfo;
 
-  struct DebugMarkerObjectTagInfoEXT
+  class IndirectCommandsLayoutNV
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;
+  public:
+    using CType = VkIndirectCommandsLayoutNV;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT
-    : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
     {}
 
-    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
     {}
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    template <typename T>
-    DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
-    : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
+    VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
     {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
-    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
+      m_indirectCommandsLayoutNV = indirectCommandsLayoutNV;
       return *this;
     }
+#endif
 
-    DebugMarkerObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      m_indirectCommandsLayoutNV = VK_NULL_HANDLE;
       return *this;
     }
 
-    DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( IndirectCommandsLayoutNV const& ) const = default;
+#else
+    bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      objectType = objectType_;
-      return *this;
+      return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV;
     }
 
-    DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      object = object_;
-      return *this;
+      return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV;
     }
 
-    DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      tagName = tagName_;
-      return *this;
+      return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV;
     }
+#endif
 
-    DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
     {
-      tagSize = tagSize_;
+      return m_indirectCommandsLayoutNV;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eIndirectCommandsLayoutNV>
+  {
+    using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct IndirectCommandsStreamNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
       return *this;
     }
 
-    DebugMarkerObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
+    IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
     {
-      pTag = pTag_;
+      buffer = buffer_;
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    template <typename T>
-    DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+    IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
     {
-      tagSize = tag_.size() * sizeof(T);
-      pTag = tag_.data();
+      offset = offset_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkDebugMarkerObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
+      return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
     }
 
-    operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
+      return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DebugMarkerObjectTagInfoEXT const& ) const = default;
+    auto operator<=>( IndirectCommandsStreamNV const& ) const = default;
 #else
-    bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( object == rhs.object )
-          && ( tagName == rhs.tagName )
-          && ( tagSize == rhs.tagSize )
-          && ( pTag == rhs.pTag );
+      return ( buffer == rhs.buffer )
+          && ( offset == rhs.offset );
     }
 
-    bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -27507,189 +27879,172 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
-    uint64_t object = {};
-    uint64_t tagName = {};
-    size_t tagSize = {};
-    const void* pTag = {};
-
-  };
-  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
 
-  template <>
-  struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
-  {
-    using Type = DebugMarkerObjectTagInfoEXT;
   };
+  static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
 
-  struct DebugReportCallbackCreateInfoEXT
+  struct GeneratedCommandsInfoNV
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), pfnCallback( pfnCallback_ ), pUserData( pUserData_ )
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}) VULKAN_HPP_NOEXCEPT
+    : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( streamCount_ ), pStreams( pStreams_ ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
+    GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} )
+    : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast<uint32_t>( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
     {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
       return *this;
     }
 
-    DebugReportCallbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      pipelineBindPoint = pipelineBindPoint_;
       return *this;
     }
 
-    DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
     {
-      pfnCallback = pfnCallback_;
+      pipeline = pipeline_;
       return *this;
     }
 
-    DebugReportCallbackCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
     {
-      pUserData = pUserData_;
+      indirectCommandsLayout = indirectCommandsLayout_;
       return *this;
     }
 
-
-    operator VkDebugReportCallbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
+      streamCount = streamCount_;
+      return *this;
     }
 
-    operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
+      pStreams = pStreams_;
+      return *this;
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DebugReportCallbackCreateInfoEXT const& ) const = default;
-#else
-    bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pfnCallback == rhs.pfnCallback )
-          && ( pUserData == rhs.pUserData );
+      streamCount = static_cast<uint32_t>( streams_.size() );
+      pStreams = streams_.data();
+      return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      sequencesCount = sequencesCount_;
+      return *this;
     }
-#endif
-
-
-
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
-    PFN_vkDebugReportCallbackEXT pfnCallback = {};
-    void* pUserData = {};
-
-  };
-  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
-  {
-    using Type = DebugReportCallbackCreateInfoEXT;
-  };
-
-  struct DebugUtilsLabelEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char* pLabelName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
-    : pLabelName( pLabelName_ ), color( color_ )
-    {}
 
-    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preprocessBuffer = preprocessBuffer_;
+      return *this;
+    }
 
-    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) )
-    {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preprocessOffset = preprocessOffset_;
+      return *this;
+    }
 
-    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preprocessSize = preprocessSize_;
+      return *this;
+    }
 
-    DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
+      sequencesCountBuffer = sequencesCountBuffer_;
       return *this;
     }
 
-    DebugUtilsLabelEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      sequencesCountOffset = sequencesCountOffset_;
       return *this;
     }
 
-    DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
     {
-      pLabelName = pLabelName_;
+      sequencesIndexBuffer = sequencesIndexBuffer_;
       return *this;
     }
 
-    DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
     {
-      color = color_;
+      sequencesIndexOffset = sequencesIndexOffset_;
       return *this;
     }
 
 
-    operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
+      return *reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
     }
 
-    operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
+      return *reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DebugUtilsLabelEXT const& ) const = default;
+    auto operator<=>( GeneratedCommandsInfoNV const& ) const = default;
 #else
-    bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( pLabelName == rhs.pLabelName )
-          && ( color == rhs.color );
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipeline == rhs.pipeline )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( streamCount == rhs.streamCount )
+          && ( pStreams == rhs.pStreams )
+          && ( sequencesCount == rhs.sequencesCount )
+          && ( preprocessBuffer == rhs.preprocessBuffer )
+          && ( preprocessOffset == rhs.preprocessOffset )
+          && ( preprocessSize == rhs.preprocessSize )
+          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+          && ( sequencesCountOffset == rhs.sequencesCountOffset )
+          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
     }
 
-    bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -27698,95 +28053,99 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
     const void* pNext = {};
-    const char* pLabelName = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
+    uint32_t streamCount = {};
+    const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {};
+    uint32_t sequencesCount = {};
+    VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
+    VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
+    VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
 
   };
-  static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
+  struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
   {
-    using Type = DebugUtilsLabelEXT;
+    using Type = GeneratedCommandsInfoNV;
   };
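
A minimal sketch of filling GeneratedCommandsInfoNV through the setters above, using the ArrayProxy-based setStreams that is available when enhanced mode is enabled; pipeline, commandsLayout, tokenBuffer, preprocessBuffer, preprocessSize and sequenceCount are placeholders for objects created elsewhere via VK_NV_device_generated_commands:

    std::array<vk::IndirectCommandsStreamNV, 1> streams = { vk::IndirectCommandsStreamNV( tokenBuffer, 0 /*offset*/ ) };
    vk::GeneratedCommandsInfoNV info = vk::GeneratedCommandsInfoNV()
                                           .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
                                           .setPipeline( pipeline )
                                           .setIndirectCommandsLayout( commandsLayout )
                                           .setStreams( streams )             // fills streamCount and pStreams together
                                           .setSequencesCount( sequenceCount )
                                           .setPreprocessBuffer( preprocessBuffer )
                                           .setPreprocessSize( preprocessSize );
    cmdBuffer.executeGeneratedCommandsNV( VK_FALSE /*isPreprocessed*/, info );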
 
-  struct DebugUtilsObjectNameInfoEXT
+  struct MemoryBarrier
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
-    : objectType( objectType_ ), objectHandle( objectHandle_ ), pObjectName( pObjectName_ )
+    VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
+    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
       return *this;
     }
 
-    DebugUtilsObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
-    {
-      objectType = objectType_;
-      return *this;
-    }
-
-    DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      objectHandle = objectHandle_;
+      srcAccessMask = srcAccessMask_;
       return *this;
     }
 
-    DebugUtilsObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      pObjectName = pObjectName_;
+      dstAccessMask = dstAccessMask_;
       return *this;
     }
 
 
-    operator VkDebugUtilsObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
+      return *reinterpret_cast<const VkMemoryBarrier*>( this );
     }
 
-    operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
+      return *reinterpret_cast<VkMemoryBarrier*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DebugUtilsObjectNameInfoEXT const& ) const = default;
+    auto operator<=>( MemoryBarrier const& ) const = default;
 #else
-    bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( objectHandle == rhs.objectHandle )
-          && ( pObjectName == rhs.pObjectName );
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask );
     }
 
-    bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -27795,178 +28154,130 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
-    uint64_t objectHandle = {};
-    const char* pObjectName = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
 
   };
-  static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
+  struct CppType<StructureType, StructureType::eMemoryBarrier>
   {
-    using Type = DebugUtilsObjectNameInfoEXT;
+    using Type = MemoryBarrier;
   };
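
A minimal sketch of the MemoryBarrier wrapper (reordered ahead of the new synchronization2 types by this change) in use: a global barrier that makes transfer writes visible to fragment-shader reads. It assumes VULKAN_HPP_NAMESPACE is the default vk, cmd is a vk::CommandBuffer in the recording state, and the enhanced-mode CommandBuffer::pipelineBarrier overload taking ArrayProxy arguments is available.

    vk::MemoryBarrier barrier = vk::MemoryBarrier()
      .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
      .setDstAccessMask( vk::AccessFlagBits::eShaderRead );
    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                         vk::PipelineStageFlagBits::eFragmentShader,
                         {},          // no dependency flags
                         barrier,     // one global memory barrier
                         nullptr,     // no buffer memory barriers
                         nullptr );   // no image memory barriers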
 
-  struct DebugUtilsMessengerCallbackDataEXT
+  struct ImageMemoryBarrier
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char* pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char* pMessage_ = {}, uint32_t queueLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( queueLabelCount_ ), pQueueLabels( pQueueLabels_ ), cmdBufLabelCount( cmdBufLabelCount_ ), pCmdBufLabels( pCmdBufLabels_ ), objectCount( objectCount_ ), pObjects( pObjects_ )
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
-    {}
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char* pMessageIdName_, int32_t messageIdNumber_, const char* pMessage_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {} )
-    : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) ), pQueueLabels( queueLabels_.data() ), cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) ), pCmdBufLabels( cmdBufLabels_.data() ), objectCount( static_cast<uint32_t>( objects_.size() ) ), pObjects( objects_.data() )
+    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
     {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
       return *this;
     }
 
-    DebugUtilsMessengerCallbackDataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      srcAccessMask = srcAccessMask_;
       return *this;
     }
 
-    DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char* pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      pMessageIdName = pMessageIdName_;
+      dstAccessMask = dstAccessMask_;
       return *this;
     }
 
-    DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
     {
-      messageIdNumber = messageIdNumber_;
+      oldLayout = oldLayout_;
       return *this;
     }
 
-    DebugUtilsMessengerCallbackDataEXT & setPMessage( const char* pMessage_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
     {
-      pMessage = pMessage_;
+      newLayout = newLayout_;
       return *this;
     }
 
-    DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
     {
-      queueLabelCount = queueLabelCount_;
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
       return *this;
     }
 
-    DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
     {
-      pQueueLabels = pQueueLabels_;
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DebugUtilsMessengerCallbackDataEXT & setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
     {
-      queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
-      pQueueLabels = queueLabels_.data();
+      image = image_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
     {
-      cmdBufLabelCount = cmdBufLabelCount_;
+      subresourceRange = subresourceRange_;
       return *this;
     }
 
-    DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pCmdBufLabels = pCmdBufLabels_;
-      return *this;
-    }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+    operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
     {
-      cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
-      pCmdBufLabels = cmdBufLabels_.data();
-      return *this;
+      return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
+    operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
     {
-      objectCount = objectCount_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pObjects = pObjects_;
-      return *this;
-    }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DebugUtilsMessengerCallbackDataEXT & setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
-    {
-      objectCount = static_cast<uint32_t>( objects_.size() );
-      pObjects = objects_.data();
-      return *this;
-    }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
-
-    operator VkDebugUtilsMessengerCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
-    }
-
-    operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
+      return *reinterpret_cast<VkImageMemoryBarrier*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DebugUtilsMessengerCallbackDataEXT const& ) const = default;
+    auto operator<=>( ImageMemoryBarrier const& ) const = default;
 #else
-    bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pMessageIdName == rhs.pMessageIdName )
-          && ( messageIdNumber == rhs.messageIdNumber )
-          && ( pMessage == rhs.pMessage )
-          && ( queueLabelCount == rhs.queueLabelCount )
-          && ( pQueueLabels == rhs.pQueueLabels )
-          && ( cmdBufLabelCount == rhs.cmdBufLabelCount )
-          && ( pCmdBufLabels == rhs.pCmdBufLabels )
-          && ( objectCount == rhs.objectCount )
-          && ( pObjects == rhs.pObjects );
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( oldLayout == rhs.oldLayout )
+          && ( newLayout == rhs.newLayout )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( image == rhs.image )
+          && ( subresourceRange == rhs.subresourceRange );
     }
 
-    bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -27975,117 +28286,108 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
-    const char* pMessageIdName = {};
-    int32_t messageIdNumber = {};
-    const char* pMessage = {};
-    uint32_t queueLabelCount = {};
-    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels = {};
-    uint32_t cmdBufLabelCount = {};
-    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels = {};
-    uint32_t objectCount = {};
-    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
 
   };
-  static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
+  struct CppType<StructureType, StructureType::eImageMemoryBarrier>
   {
-    using Type = DebugUtilsMessengerCallbackDataEXT;
+    using Type = ImageMemoryBarrier;
   };
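
The same pattern with the ImageMemoryBarrier wrapper, sketching a layout transition from a transfer destination to a sampled image. Here image is a hypothetical vk::Image; the other assumptions match the MemoryBarrier sketch above.

    vk::ImageMemoryBarrier toSampled = vk::ImageMemoryBarrier()
      .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
      .setDstAccessMask( vk::AccessFlagBits::eShaderRead )
      .setOldLayout( vk::ImageLayout::eTransferDstOptimal )
      .setNewLayout( vk::ImageLayout::eShaderReadOnlyOptimal )
      .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
      .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
      .setImage( image )  // hypothetical vk::Image
      .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                         vk::PipelineStageFlagBits::eFragmentShader,
                         {}, nullptr, nullptr, toSampled );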
 
-  struct DebugUtilsMessengerCreateInfoEXT
+  struct MemoryBarrier2KHR
   {
-    static const bool allowDuplicate = true;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2KHR;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), messageSeverity( messageSeverity_ ), messageType( messageType_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
+    VULKAN_HPP_CONSTEXPR MemoryBarrier2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR MemoryBarrier2KHR( MemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
+    MemoryBarrier2KHR( VkMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryBarrier2KHR( *reinterpret_cast<MemoryBarrier2KHR const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2KHR & operator=( MemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    MemoryBarrier2KHR & operator=( VkMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR const *>( &rhs );
       return *this;
     }
 
-    DebugUtilsMessengerCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    MemoryBarrier2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
+    MemoryBarrier2KHR & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      messageSeverity = messageSeverity_;
+      srcStageMask = srcStageMask_;
       return *this;
     }
 
-    DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
+    MemoryBarrier2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      messageType = messageType_;
+      srcAccessMask = srcAccessMask_;
       return *this;
     }
 
-    DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+    MemoryBarrier2KHR & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      pfnUserCallback = pfnUserCallback_;
+      dstStageMask = dstStageMask_;
       return *this;
     }
 
-    DebugUtilsMessengerCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    MemoryBarrier2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      pUserData = pUserData_;
+      dstAccessMask = dstAccessMask_;
       return *this;
     }
 
 
-    operator VkDebugUtilsMessengerCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkMemoryBarrier2KHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
+      return *reinterpret_cast<const VkMemoryBarrier2KHR*>( this );
     }
 
-    operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
+      return *reinterpret_cast<VkMemoryBarrier2KHR*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DebugUtilsMessengerCreateInfoEXT const& ) const = default;
+    auto operator<=>( MemoryBarrier2KHR const& ) const = default;
 #else
-    bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( MemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( messageSeverity == rhs.messageSeverity )
-          && ( messageType == rhs.messageType )
-          && ( pfnUserCallback == rhs.pfnUserCallback )
-          && ( pUserData == rhs.pUserData );
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( dstAccessMask == rhs.dstAccessMask );
     }
 
-    bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( MemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -28094,129 +28396,146 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2KHR;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
-    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
-    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
-    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
-    void* pUserData = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {};
 
   };
-  static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( MemoryBarrier2KHR ) == sizeof( VkMemoryBarrier2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryBarrier2KHR>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
+  struct CppType<StructureType, StructureType::eMemoryBarrier2KHR>
   {
-    using Type = DebugUtilsMessengerCreateInfoEXT;
+    using Type = MemoryBarrier2KHR;
   };
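
A sketch of the new MemoryBarrier2KHR wrapper. Unlike MemoryBarrier, the VK_KHR_synchronization2 barrier carries its own source and destination stage masks, so the stages no longer come from the recording call itself. The flag-bit spellings (PipelineStageFlagBits2KHR::eTransfer and so on) follow the usual vulkan.hpp naming convention and are assumptions, not quotes from this diff.

    vk::MemoryBarrier2KHR barrier2 = vk::MemoryBarrier2KHR()
      .setSrcStageMask( vk::PipelineStageFlagBits2KHR::eTransfer )        // assumed bit name
      .setSrcAccessMask( vk::AccessFlagBits2KHR::eTransferWrite )         // assumed bit name
      .setDstStageMask( vk::PipelineStageFlagBits2KHR::eFragmentShader )  // assumed bit name
      .setDstAccessMask( vk::AccessFlagBits2KHR::eShaderRead );           // assumed bit name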
 
-  struct DebugUtilsObjectTagInfoEXT
+  struct ImageMemoryBarrier2KHR
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2KHR;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT
-    : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
-    {}
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2KHR( ImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    template <typename T>
-    DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
-    : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
+    ImageMemoryBarrier2KHR( VkImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageMemoryBarrier2KHR( *reinterpret_cast<ImageMemoryBarrier2KHR const *>( &rhs ) )
     {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2KHR & operator=( ImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier2KHR & operator=( VkImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR const *>( &rhs );
       return *this;
     }
 
-    DebugUtilsObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier2KHR & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      objectType = objectType_;
+      srcStageMask = srcStageMask_;
       return *this;
     }
 
-    DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      objectHandle = objectHandle_;
+      srcAccessMask = srcAccessMask_;
       return *this;
     }
 
-    DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier2KHR & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      tagName = tagName_;
+      dstStageMask = dstStageMask_;
       return *this;
     }
 
-    DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      tagSize = tagSize_;
+      dstAccessMask = dstAccessMask_;
       return *this;
     }
 
-    DebugUtilsObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier2KHR & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
     {
-      pTag = pTag_;
+      oldLayout = oldLayout_;
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    template <typename T>
-    DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier2KHR & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
     {
-      tagSize = tag_.size() * sizeof(T);
-      pTag = tag_.data();
+      newLayout = newLayout_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
+    ImageMemoryBarrier2KHR & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
 
-    operator VkDebugUtilsObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier2KHR & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
     }
 
-    operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    ImageMemoryBarrier2KHR & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
+      image = image_;
+      return *this;
+    }
+
+    ImageMemoryBarrier2KHR & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+
+
+    operator VkImageMemoryBarrier2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryBarrier2KHR*>( this );
+    }
+
+    operator VkImageMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryBarrier2KHR*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DebugUtilsObjectTagInfoEXT const& ) const = default;
+    auto operator<=>( ImageMemoryBarrier2KHR const& ) const = default;
 #else
-    bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImageMemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( objectHandle == rhs.objectHandle )
-          && ( tagName == rhs.tagName )
-          && ( tagSize == rhs.tagSize )
-          && ( pTag == rhs.pTag );
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( oldLayout == rhs.oldLayout )
+          && ( newLayout == rhs.newLayout )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( image == rhs.image )
+          && ( subresourceRange == rhs.subresourceRange );
     }
 
-    bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImageMemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -28225,84 +28544,164 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2KHR;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
-    uint64_t objectHandle = {};
-    uint64_t tagName = {};
-    size_t tagSize = {};
-    const void* pTag = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
 
   };
-  static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ImageMemoryBarrier2KHR ) == sizeof( VkImageMemoryBarrier2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageMemoryBarrier2KHR>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
+  struct CppType<StructureType, StructureType::eImageMemoryBarrier2KHR>
   {
-    using Type = DebugUtilsObjectTagInfoEXT;
+    using Type = ImageMemoryBarrier2KHR;
   };
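
ImageMemoryBarrier2KHR has the same shape as ImageMemoryBarrier plus the per-barrier stage masks. A sketch of the transfer-to-sampled transition in synchronization2 terms, with the same hypothetical image and the same assumed flag-bit spellings as above:

    vk::ImageMemoryBarrier2KHR imgBarrier2 = vk::ImageMemoryBarrier2KHR()
      .setSrcStageMask( vk::PipelineStageFlagBits2KHR::eTransfer )
      .setSrcAccessMask( vk::AccessFlagBits2KHR::eTransferWrite )
      .setDstStageMask( vk::PipelineStageFlagBits2KHR::eFragmentShader )
      .setDstAccessMask( vk::AccessFlagBits2KHR::eShaderRead )
      .setOldLayout( vk::ImageLayout::eTransferDstOptimal )
      .setNewLayout( vk::ImageLayout::eShaderReadOnlyOptimal )
      .setImage( image )  // hypothetical vk::Image
      .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );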
 
-  struct DedicatedAllocationBufferCreateInfoNV
+  struct DependencyInfoKHR
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfoKHR;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
-    : dedicatedAllocation( dedicatedAllocation_ )
+    VULKAN_HPP_CONSTEXPR DependencyInfoKHR(VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, uint32_t memoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR* pMemoryBarriers_ = {}, uint32_t bufferMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR* pBufferMemoryBarriers_ = {}, uint32_t imageMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR* pImageMemoryBarriers_ = {}) VULKAN_HPP_NOEXCEPT
+    : dependencyFlags( dependencyFlags_ ), memoryBarrierCount( memoryBarrierCount_ ), pMemoryBarriers( pMemoryBarriers_ ), bufferMemoryBarrierCount( bufferMemoryBarrierCount_ ), pBufferMemoryBarriers( pBufferMemoryBarriers_ ), imageMemoryBarrierCount( imageMemoryBarrierCount_ ), pImageMemoryBarriers( pImageMemoryBarriers_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DependencyInfoKHR( DependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
+    DependencyInfoKHR( VkDependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DependencyInfoKHR( *reinterpret_cast<DependencyInfoKHR const *>( &rhs ) )
     {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DependencyInfoKHR( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR> const & memoryBarriers_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR> const & bufferMemoryBarriers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR> const & imageMemoryBarriers_ = {} )
+    : dependencyFlags( dependencyFlags_ ), memoryBarrierCount( static_cast<uint32_t>( memoryBarriers_.size() ) ), pMemoryBarriers( memoryBarriers_.data() ), bufferMemoryBarrierCount( static_cast<uint32_t>( bufferMemoryBarriers_.size() ) ), pBufferMemoryBarriers( bufferMemoryBarriers_.data() ), imageMemoryBarrierCount( static_cast<uint32_t>( imageMemoryBarriers_.size() ) ), pImageMemoryBarriers( imageMemoryBarriers_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfoKHR & operator=( DependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    DependencyInfoKHR & operator=( VkDependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DependencyInfoKHR const *>( &rhs );
       return *this;
     }
 
-    DedicatedAllocationBufferCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DependencyInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+    DependencyInfoKHR & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
     {
-      dedicatedAllocation = dedicatedAllocation_;
+      dependencyFlags = dependencyFlags_;
       return *this;
     }
 
+    DependencyInfoKHR & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryBarrierCount = memoryBarrierCount_;
+      return *this;
+    }
 
-    operator VkDedicatedAllocationBufferCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    DependencyInfoKHR & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR* pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
+      pMemoryBarriers = pMemoryBarriers_;
+      return *this;
     }
 
-    operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DependencyInfoKHR & setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR> const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
+      memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
+      pMemoryBarriers = memoryBarriers_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DependencyInfoKHR & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
+      return *this;
+    }
+
+    DependencyInfoKHR & setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR* pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferMemoryBarriers = pBufferMemoryBarriers_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DependencyInfoKHR & setBufferMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR> const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
+      pBufferMemoryBarriers = bufferMemoryBarriers_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DependencyInfoKHR & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageMemoryBarrierCount = imageMemoryBarrierCount_;
+      return *this;
+    }
+
+    DependencyInfoKHR & setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR* pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageMemoryBarriers = pImageMemoryBarriers_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DependencyInfoKHR & setImageMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR> const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
+      pImageMemoryBarriers = imageMemoryBarriers_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDependencyInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDependencyInfoKHR*>( this );
+    }
+
+    operator VkDependencyInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDependencyInfoKHR*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DedicatedAllocationBufferCreateInfoNV const& ) const = default;
+    auto operator<=>( DependencyInfoKHR const& ) const = default;
 #else
-    bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DependencyInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( dedicatedAllocation == rhs.dedicatedAllocation );
+          && ( dependencyFlags == rhs.dependencyFlags )
+          && ( memoryBarrierCount == rhs.memoryBarrierCount )
+          && ( pMemoryBarriers == rhs.pMemoryBarriers )
+          && ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount )
+          && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers )
+          && ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount )
+          && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
     }
 
-    bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DependencyInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -28311,169 +28710,191 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfoKHR;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+    uint32_t memoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR* pMemoryBarriers = {};
+    uint32_t bufferMemoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR* pBufferMemoryBarriers = {};
+    uint32_t imageMemoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR* pImageMemoryBarriers = {};
 
   };
-  static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DependencyInfoKHR ) == sizeof( VkDependencyInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DependencyInfoKHR>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
+  struct CppType<StructureType, StructureType::eDependencyInfoKHR>
   {
-    using Type = DedicatedAllocationBufferCreateInfoNV;
+    using Type = DependencyInfoKHR;
   };
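
DependencyInfoKHR bundles the synchronization2 barriers for a single recording call. A sketch that wraps the imgBarrier2 lvalue from the previous sketch and records it; setImageMemoryBarriers takes an ArrayProxyNoTemporaries, so passing a temporary would be rejected at compile time. The CommandBuffer::pipelineBarrier2KHR member used here is assumed to be the enhanced-mode counterpart of the new vkCmdPipelineBarrier2KHR entry point, and VK_KHR_synchronization2 must be enabled on the device.

    vk::DependencyInfoKHR depInfo;
    depInfo.setImageMemoryBarriers( imgBarrier2 );  // must be an lvalue (ArrayProxyNoTemporaries)
    cmd.pipelineBarrier2KHR( depInfo );             // assumed enhanced-mode overload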
 
-  struct DedicatedAllocationImageCreateInfoNV
+  class Sampler
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+  public:
+    using CType = VkSampler;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
-    : dedicatedAllocation( dedicatedAllocation_ )
-    {}
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
 
-    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+  public:
+    VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT
+      : m_sampler(VK_NULL_HANDLE)
+    {}
 
-    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
+    VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_sampler(VK_NULL_HANDLE)
     {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
+      : m_sampler( sampler )
+    {}
 
-    DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
+      m_sampler = sampler;
       return *this;
     }
+#endif
 
-    DedicatedAllocationImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      m_sampler = VK_NULL_HANDLE;
       return *this;
     }
 
-    DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Sampler const& ) const = default;
+#else
+    bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      dedicatedAllocation = dedicatedAllocation_;
-      return *this;
+      return m_sampler == rhs.m_sampler;
     }
 
-
-    operator VkDedicatedAllocationImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>( this );
+      return m_sampler != rhs.m_sampler;
     }
 
-    operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>( this );
+      return m_sampler < rhs.m_sampler;
     }
+#endif
 
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler;
+    }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DedicatedAllocationImageCreateInfoNV const& ) const = default;
-#else
-    bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dedicatedAllocation == rhs.dedicatedAllocation );
+      return m_sampler != VK_NULL_HANDLE;
     }
 
-    bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return m_sampler == VK_NULL_HANDLE;
     }
-#endif
 
+  private:
+    VkSampler m_sampler;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
 
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSampler>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Sampler;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Sampler;
+  };
 
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
 
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Sampler;
   };
-  static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
 
   template <>
-  struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler>
   {
-    using Type = DedicatedAllocationImageCreateInfoNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
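
The Sampler handle wrapper keeps the usual vulkan.hpp null-handle semantics, which the sketch below exercises directly from the members shown above (default construction, comparison, assignment from nullptr, operator!); assert comes from <cassert>.

    vk::Sampler sampler;                          // default-constructed: holds VK_NULL_HANDLE
    assert( !sampler );                           // operator! is true for a null handle
    assert( sampler == vk::Sampler( nullptr ) );  // compares equal to a null-constructed handle
    sampler = nullptr;                            // reset back to VK_NULL_HANDLE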
 
-  struct DedicatedAllocationMemoryAllocateInfoNV
+  struct DescriptorImageInfo
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
-    : image( image_ ), buffer( buffer_ )
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : sampler( sampler_ ), imageView( imageView_ ), imageLayout( imageLayout_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
+    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
       return *this;
     }
 
-    DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      sampler = sampler_;
       return *this;
     }
 
-    DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
     {
-      image = image_;
+      imageView = imageView_;
       return *this;
     }
 
-    DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
     {
-      buffer = buffer_;
+      imageLayout = imageLayout_;
       return *this;
     }
 
 
-    operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
+      return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
     }
 
-    operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
+      return *reinterpret_cast<VkDescriptorImageInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const& ) const = default;
+    auto operator<=>( DescriptorImageInfo const& ) const = default;
 #else
-    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( image == rhs.image )
-          && ( buffer == rhs.buffer );
+      return ( sampler == rhs.sampler )
+          && ( imageView == rhs.imageView )
+          && ( imageLayout == rhs.imageLayout );
     }
 
-    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -28482,20 +28903,13 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Image image = {};
-    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
-
-  };
-  static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
 
-  template <>
-  struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
-  {
-    using Type = DedicatedAllocationMemoryAllocateInfoNV;
   };
+  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
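
A minimal usage sketch for the DescriptorImageInfo struct above, assuming `sampler` and `imageView` are valid handles created from a vk::Device elsewhere; the eShaderReadOnlyOptimal layout is only illustrative.

    // Constructor form
    vk::DescriptorImageInfo imageInfo( sampler, imageView, vk::ImageLayout::eShaderReadOnlyOptimal );

    // Equivalent chained-setter form; each setter returns *this
    vk::DescriptorImageInfo imageInfo2;
    imageInfo2.setSampler( sampler )
              .setImageView( imageView )
              .setImageLayout( vk::ImageLayout::eShaderReadOnlyOptimal );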
 
   struct DescriptorBufferInfo
   {
@@ -28578,345 +28992,566 @@ namespace VULKAN_HPP_NAMESPACE
   static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
 
-  class Sampler
+  class BufferView
   {
   public:
-    using CType = VkSampler;
+    using CType = VkBufferView;
 
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
 
   public:
-    VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT
-      : m_sampler(VK_NULL_HANDLE)
+    VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT
+      : m_bufferView(VK_NULL_HANDLE)
     {}
 
-    VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_sampler(VK_NULL_HANDLE)
+    VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_bufferView(VK_NULL_HANDLE)
     {}
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
-      : m_sampler( sampler )
+    VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
+      : m_bufferView( bufferView )
     {}
 
 #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
+    BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
     {
-      m_sampler = sampler;
+      m_bufferView = bufferView;
       return *this;
     }
 #endif
 
-    Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      m_sampler = VK_NULL_HANDLE;
+      m_bufferView = VK_NULL_HANDLE;
       return *this;
     }
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Sampler const& ) const = default;
+    auto operator<=>( BufferView const& ) const = default;
 #else
-    bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_sampler == rhs.m_sampler;
+      return m_bufferView == rhs.m_bufferView;
     }
 
-    bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_sampler != rhs.m_sampler;
+      return m_bufferView != rhs.m_bufferView;
     }
 
-    bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_sampler < rhs.m_sampler;
+      return m_bufferView < rhs.m_bufferView;
     }
 #endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
     {
-      return m_sampler;
+      return m_bufferView;
     }
 
     explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return m_sampler != VK_NULL_HANDLE;
+      return m_bufferView != VK_NULL_HANDLE;
     }
 
     bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return m_sampler == VK_NULL_HANDLE;
+      return m_bufferView == VK_NULL_HANDLE;
     }
 
   private:
-    VkSampler m_sampler;
+    VkBufferView m_bufferView;
   };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
 
   template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSampler>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eBufferView>
   {
-    using type = VULKAN_HPP_NAMESPACE::Sampler;
+    using type = VULKAN_HPP_NAMESPACE::BufferView;
   };
 
   template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView>
   {
-    using Type = VULKAN_HPP_NAMESPACE::Sampler;
+    using Type = VULKAN_HPP_NAMESPACE::BufferView;
   };
 
 
   template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
   {
-    using Type = VULKAN_HPP_NAMESPACE::Sampler;
+    using Type = VULKAN_HPP_NAMESPACE::BufferView;
   };
 
 
   template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferView>
   {
     static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
 
-  class ImageView
+  struct WriteDescriptorSet
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {}) VULKAN_HPP_NOEXCEPT
+    : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), pImageInfo( pImageInfo_ ), pBufferInfo( pBufferInfo_ ), pTexelBufferView( pTexelBufferView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) )
+    {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {} )
+    : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) == 1 );
+#else
+      if ( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1 )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::WriteDescriptorSet::WriteDescriptorSet: ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
+      return *this;
+    }
+
+    WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageInfo = pImageInfo_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
+      pImageInfo = imageInfo_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferInfo = pBufferInfo_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
+      pBufferInfo = bufferInfo_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTexelBufferView = pTexelBufferView_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
+      pTexelBufferView = texelBufferView_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
+    }
+
+    operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSet*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( WriteDescriptorSet const& ) const = default;
+#else
+    bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( pImageInfo == rhs.pImageInfo )
+          && ( pBufferInfo == rhs.pBufferInfo )
+          && ( pTexelBufferView == rhs.pTexelBufferView );
+    }
+
+    bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
+    const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
+
+  };
+  static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSet>
+  {
+    using Type = WriteDescriptorSet;
+  };
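
When VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined, the ArrayProxy constructor of WriteDescriptorSet above deduces descriptorCount from whichever of the three arrays is non-empty (and checks that exactly one is). A sketch, assuming `device`, `descriptorSet`, `sampler` and `imageView` are valid handles created elsewhere and that the enhanced-mode Device::updateDescriptorSets member is generated as usual:

    vk::DescriptorImageInfo imageInfo( sampler, imageView, vk::ImageLayout::eShaderReadOnlyOptimal );

    // descriptorCount is deduced as 1 from the single image info passed in
    vk::WriteDescriptorSet write( descriptorSet,
                                  0 /*dstBinding*/,
                                  0 /*dstArrayElement*/,
                                  vk::DescriptorType::eCombinedImageSampler,
                                  imageInfo );

    device.updateDescriptorSets( write, nullptr );   // no CopyDescriptorSets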
+
+  class DescriptorUpdateTemplate
   {
   public:
-    using CType = VkImageView;
+    using CType = VkDescriptorUpdateTemplate;
 
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
 
   public:
-    VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT
-      : m_imageView(VK_NULL_HANDLE)
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
     {}
 
-    VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_imageView(VK_NULL_HANDLE)
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
     {}
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
-      : m_imageView( imageView )
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
     {}
 
 #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT
     {
-      m_imageView = imageView;
+      m_descriptorUpdateTemplate = descriptorUpdateTemplate;
       return *this;
     }
 #endif
 
-    ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      m_imageView = VK_NULL_HANDLE;
+      m_descriptorUpdateTemplate = VK_NULL_HANDLE;
       return *this;
     }
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageView const& ) const = default;
+    auto operator<=>( DescriptorUpdateTemplate const& ) const = default;
 #else
-    bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_imageView == rhs.m_imageView;
+      return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
     }
 
-    bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_imageView != rhs.m_imageView;
+      return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
     }
 
-    bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_imageView < rhs.m_imageView;
+      return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
     }
 #endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
     {
-      return m_imageView;
+      return m_descriptorUpdateTemplate;
     }
 
     explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return m_imageView != VK_NULL_HANDLE;
+      return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
     }
 
     bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return m_imageView == VK_NULL_HANDLE;
+      return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
     }
 
   private:
-    VkImageView m_imageView;
+    VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
   };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
 
   template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eImageView>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorUpdateTemplate>
   {
-    using type = VULKAN_HPP_NAMESPACE::ImageView;
+    using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
   };
 
   template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImageView>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
   {
-    using Type = VULKAN_HPP_NAMESPACE::ImageView;
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
   };
 
 
   template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
   {
-    using Type = VULKAN_HPP_NAMESPACE::ImageView;
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
   };
 
 
   template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ImageView>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
   {
     static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
+  using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
 
-  struct DescriptorImageInfo
+  class Event
   {
+  public:
+    using CType = VkEvent;
 
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
-    : sampler( sampler_ ), imageView( imageView_ ), imageLayout( imageLayout_ )
+  public:
+    VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT
+      : m_event(VK_NULL_HANDLE)
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
+    VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_event(VK_NULL_HANDLE)
     {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT
+      : m_event( event )
+    {}
 
-    DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
+      m_event = event;
       return *this;
     }
+#endif
 
-    DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      sampler = sampler_;
+      m_event = VK_NULL_HANDLE;
       return *this;
     }
 
-    DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Event const& ) const = default;
+#else
+    bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      imageView = imageView_;
-      return *this;
+      return m_event == rhs.m_event;
     }
 
-    DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      imageLayout = imageLayout_;
-      return *this;
+      return m_event != rhs.m_event;
     }
 
-
-    operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT
+    bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
+      return m_event < rhs.m_event;
     }
+#endif
 
-    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorImageInfo*>( this );
+      return m_event;
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorImageInfo const& ) const = default;
-#else
-    bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return ( sampler == rhs.sampler )
-          && ( imageView == rhs.imageView )
-          && ( imageLayout == rhs.imageLayout );
+      return m_event != VK_NULL_HANDLE;
     }
 
-    bool operator!=( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return m_event == VK_NULL_HANDLE;
     }
-#endif
 
+  private:
+    VkEvent m_event;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eEvent>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Event;
+  };
 
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Event;
+  };
 
-  public:
-    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
-    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
-    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
 
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Event;
   };
-  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
 
-  struct DescriptorPoolSize
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Event>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
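
The Event handle is also the object consumed by the VK_KHR_synchronization2 commands added in this update. A hedged sketch, assuming enhanced mode with exceptions, valid `device` and `commandBuffer` handles, and the usual generated CommandBuffer::setEvent2KHR member; the DependencyInfoKHR is left empty here only for brevity:

    vk::Event event = device.createEvent( vk::EventCreateInfo{} );

    vk::DependencyInfoKHR dependencyInfo{};           // barriers omitted for brevity
    commandBuffer.setEvent2KHR( event, dependencyInfo );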
+
+  struct ImageResolve
   {
 
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorPoolSize(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
-    : type( type_ ), descriptorCount( descriptorCount_ )
+    VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) )
+    ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
       return *this;
     }
 
-    DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
+    ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
     {
-      type = type_;
+      srcSubresource = srcSubresource_;
       return *this;
     }
 
-    DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
     {
-      descriptorCount = descriptorCount_;
+      srcOffset = srcOffset_;
       return *this;
     }
 
+    ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
 
-    operator VkDescriptorPoolSize const&() const VULKAN_HPP_NOEXCEPT
+    ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
+      dstOffset = dstOffset_;
+      return *this;
     }
 
-    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
+    ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorPoolSize*>( this );
+      extent = extent_;
+      return *this;
+    }
+
+
+    operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageResolve*>( this );
+    }
+
+    operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageResolve*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorPoolSize const& ) const = default;
+    auto operator<=>( ImageResolve const& ) const = default;
 #else
-    bool operator==( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( type == rhs.type )
-          && ( descriptorCount == rhs.descriptorCount );
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
     }
 
-    bool operator!=( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -28925,109 +29560,104 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
-    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
 
   };
-  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
 
-  struct DescriptorPoolCreateInfo
+  struct ImageResolve2KHR
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( poolSizeCount_ ), pPoolSizes( pPoolSizes_ )
+    VULKAN_HPP_CONSTEXPR ImageResolve2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
-    {}
+    VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, uint32_t maxSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ )
-    : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
+    ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageResolve2KHR( *reinterpret_cast<ImageResolve2KHR const *>( &rhs ) )
     {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2KHR const *>( &rhs );
       return *this;
     }
 
-    DescriptorPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ImageResolve2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    ImageResolve2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      srcSubresource = srcSubresource_;
       return *this;
     }
 
-    DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
+    ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
     {
-      maxSets = maxSets_;
+      srcOffset = srcOffset_;
       return *this;
     }
 
-    DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
+    ImageResolve2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
     {
-      poolSizeCount = poolSizeCount_;
+      dstSubresource = dstSubresource_;
       return *this;
     }
 
-    DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
+    ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
     {
-      pPoolSizes = pPoolSizes_;
+      dstOffset = dstOffset_;
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorPoolCreateInfo & setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
+    ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
     {
-      poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
-      pPoolSizes = poolSizes_.data();
+      extent = extent_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkDescriptorPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkImageResolve2KHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
+      return *reinterpret_cast<const VkImageResolve2KHR*>( this );
     }
 
-    operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
+      return *reinterpret_cast<VkImageResolve2KHR*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorPoolCreateInfo const& ) const = default;
+    auto operator<=>( ImageResolve2KHR const& ) const = default;
 #else
-    bool operator==( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( maxSets == rhs.maxSets )
-          && ( poolSizeCount == rhs.poolSizeCount )
-          && ( pPoolSizes == rhs.pPoolSizes );
+          && ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
     }
 
-    bool operator!=( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -29036,83 +29666,134 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2KHR;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
-    uint32_t maxSets = {};
-    uint32_t poolSizeCount = {};
-    const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
 
   };
-  static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageResolve2KHR>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
+  struct CppType<StructureType, StructureType::eImageResolve2KHR>
   {
-    using Type = DescriptorPoolCreateInfo;
+    using Type = ImageResolve2KHR;
   };
 
-  struct DescriptorPoolInlineUniformBlockCreateInfoEXT
+  struct ResolveImageInfo2KHR
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT(uint32_t maxInlineUniformBlockBindings_ = {}) VULKAN_HPP_NOEXCEPT
-    : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
+    VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorPoolInlineUniformBlockCreateInfoEXT( *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfoEXT const *>( &rhs ) )
+    ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ResolveImageInfo2KHR( *reinterpret_cast<ResolveImageInfo2KHR const *>( &rhs ) )
+    {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ResolveImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ )
+    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
     {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2KHR & operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR const *>( &rhs );
       return *this;
     }
 
-    DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ResolveImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DescriptorPoolInlineUniformBlockCreateInfoEXT & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
+    ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
     {
-      maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
+      srcImage = srcImage_;
       return *this;
     }
 
+    ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
 
-    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
+      dstImage = dstImage_;
+      return *this;
     }
 
-    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ResolveImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkResolveImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkResolveImageInfo2KHR*>( this );
+    }
+
+    operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkResolveImageInfo2KHR*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const& ) const = default;
+    auto operator<=>( ResolveImageInfo2KHR const& ) const = default;
 #else
-    bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
+          && ( srcImage == rhs.srcImage )
+          && ( srcImageLayout == rhs.srcImageLayout )
+          && ( dstImage == rhs.dstImage )
+          && ( dstImageLayout == rhs.dstImageLayout )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
     }
 
-    bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -29121,309 +29802,265 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2KHR;
     const void* pNext = {};
-    uint32_t maxInlineUniformBlockBindings = {};
+    VULKAN_HPP_NAMESPACE::Image srcImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Image dstImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions = {};
 
   };
-  static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorPoolInlineUniformBlockCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ResolveImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT>
+  struct CppType<StructureType, StructureType::eResolveImageInfo2KHR>
   {
-    using Type = DescriptorPoolInlineUniformBlockCreateInfoEXT;
+    using Type = ResolveImageInfo2KHR;
   };
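
ImageResolve2KHR and ResolveImageInfo2KHR back the VK_KHR_copy_commands2 resolve path. A sketch, assuming `srcImage`, `dstImage`, `extent` and `commandBuffer` come from the application, images are already in the transfer layouts, and the enhanced-mode CommandBuffer::resolveImage2KHR member is generated as usual:

    vk::ImageResolve2KHR region;
    region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
          .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
          .setExtent( extent );                       // srcOffset / dstOffset stay (0,0,0)

    vk::ResolveImageInfo2KHR resolveInfo( srcImage, vk::ImageLayout::eTransferSrcOptimal,
                                          dstImage, vk::ImageLayout::eTransferDstOptimal,
                                          region );   // regionCount deduced as 1
    commandBuffer.resolveImage2KHR( resolveInfo );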
 
-  class DescriptorPool
+  struct PerformanceMarkerInfoINTEL
   {
-  public:
-    using CType = VkDescriptorPool;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
 
-  public:
-    VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT
-      : m_descriptorPool(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}) VULKAN_HPP_NOEXCEPT
+    : marker( marker_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_descriptorPool(VK_NULL_HANDLE)
-    {}
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
-      : m_descriptorPool( descriptorPool )
+    PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
     {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      m_descriptorPool = descriptorPool;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
       return *this;
     }
-#endif
 
-    DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      m_descriptorPool = VK_NULL_HANDLE;
+      pNext = pNext_;
       return *this;
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorPool const& ) const = default;
-#else
-    bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorPool == rhs.m_descriptorPool;
+      marker = marker_;
+      return *this;
     }
 
-    bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_descriptorPool != rhs.m_descriptorPool;
-    }
 
-    bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorPool < rhs.m_descriptorPool;
+      return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
     }
-#endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
+    operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorPool;
+      return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
     }
 
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceMarkerInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorPool != VK_NULL_HANDLE;
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( marker == rhs.marker );
     }
 
-    bool operator!() const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorPool == VK_NULL_HANDLE;
+      return !operator==( rhs );
     }
+#endif
 
-  private:
-    VkDescriptorPool m_descriptorPool;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
-
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorPool>
-  {
-    using type = VULKAN_HPP_NAMESPACE::DescriptorPool;
-  };
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
-  };
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
+    const void* pNext = {};
+    uint64_t marker = {};
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
   };
-
+  static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorPool>
+  struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
   {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+    using Type = PerformanceMarkerInfoINTEL;
   };
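// Illustrative usage sketch, assuming the VK_INTEL_performance_query extension is
// enabled on the device and `cmd` is a vk::CommandBuffer in the recording state;
// setPerformanceMarkerINTEL forwards to vkCmdSetPerformanceMarkerINTEL and, in
// enhanced mode, throws vk::SystemError on failure.
#include <vulkan/vulkan.hpp>

void recordFrameMarker( vk::CommandBuffer cmd, uint64_t frameIndex )
{
  vk::PerformanceMarkerInfoINTEL markerInfo{};
  markerInfo.setMarker( frameIndex );            // 64-bit value surfaced by the INTEL performance tools
  cmd.setPerformanceMarkerINTEL( markerInfo );
}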
 
-  class DescriptorSetLayout
+  struct PerformanceOverrideInfoINTEL
   {
-  public:
-    using CType = VkDescriptorSetLayout;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
 
-  public:
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT
-      : m_descriptorSetLayout(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), enable( enable_ ), parameter( parameter_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_descriptorSetLayout(VK_NULL_HANDLE)
-    {}
+    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
-      : m_descriptorSetLayout( descriptorSetLayout )
+    PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
     {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      m_descriptorSetLayout = descriptorSetLayout;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
       return *this;
     }
-#endif
 
-    DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      m_descriptorSetLayout = VK_NULL_HANDLE;
+      pNext = pNext_;
       return *this;
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorSetLayout const& ) const = default;
-#else
-    bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
+      type = type_;
+      return *this;
     }
 
-    bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
+      enable = enable_;
+      return *this;
     }
 
-    bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
+      parameter = parameter_;
+      return *this;
     }
-#endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
+
+    operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorSetLayout;
+      return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
     }
 
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorSetLayout != VK_NULL_HANDLE;
+      return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
     }
 
-    bool operator!() const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorSetLayout == VK_NULL_HANDLE;
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( enable == rhs.enable )
+          && ( parameter == rhs.parameter );
     }
 
-  private:
-    VkDescriptorSetLayout m_descriptorSetLayout;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+    bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSetLayout>
-  {
-    using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
-  };
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
-  };
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
+    VULKAN_HPP_NAMESPACE::Bool32 enable = {};
+    uint64_t parameter = {};
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
   };
-
+  static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
+  struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
   {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+    using Type = PerformanceOverrideInfoINTEL;
   };
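// Illustrative usage sketch, assuming VK_INTEL_performance_query is enabled and
// `cmd` is in the recording state: a flush-GPU-caches override built with the
// chained setters above.
#include <vulkan/vulkan.hpp>

void flushGpuCachesOverride( vk::CommandBuffer cmd )
{
  vk::PerformanceOverrideInfoINTEL overrideInfo{};
  overrideInfo.setType( vk::PerformanceOverrideTypeINTEL::eFlushGpuCaches )
              .setEnable( VK_TRUE )
              .setParameter( 0 );                    // override-specific parameter, left at zero here
  cmd.setPerformanceOverrideINTEL( overrideInfo );   // throws vk::SystemError on failure
}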
 
-  struct DescriptorSetAllocateInfo
+  struct PerformanceStreamMarkerInfoINTEL
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}) VULKAN_HPP_NOEXCEPT
-    : descriptorPool( descriptorPool_ ), descriptorSetCount( descriptorSetCount_ ), pSetLayouts( pSetLayouts_ )
+    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}) VULKAN_HPP_NOEXCEPT
+    : marker( marker_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorSetAllocateInfo( *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs ) )
-    {}
+    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ )
-    : descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
+    PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
     {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
       return *this;
     }
 
-    DescriptorSetAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorPool = descriptorPool_;
-      return *this;
-    }
-
-    DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorSetCount = descriptorSetCount_;
-      return *this;
-    }
-
-    DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pSetLayouts = pSetLayouts_;
-      return *this;
-    }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorSetAllocateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
+    PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
     {
-      descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
-      pSetLayouts = setLayouts_.data();
+      marker = marker_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkDescriptorSetAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
+      return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
     }
 
-    operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
+      return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorSetAllocateInfo const& ) const = default;
+    auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default;
 #else
-    bool operator==( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( descriptorPool == rhs.descriptorPool )
-          && ( descriptorSetCount == rhs.descriptorSetCount )
-          && ( pSetLayouts == rhs.pSetLayouts );
+          && ( marker == rhs.marker );
     }
 
-    bool operator!=( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -29432,116 +30069,106 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
-    uint32_t descriptorSetCount = {};
-    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
+    uint32_t marker = {};
 
   };
-  static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
+  struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
   {
-    using Type = DescriptorSetAllocateInfo;
+    using Type = PerformanceStreamMarkerInfoINTEL;
   };
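// Illustrative usage sketch, assuming VK_INTEL_performance_query is enabled:
// a 32-bit stream marker recorded into the command stream, to be correlated
// later with data retrieved from the extension's performance-query pool.
#include <vulkan/vulkan.hpp>

void recordStreamMarker( vk::CommandBuffer cmd, uint32_t drawCallId )
{
  vk::PerformanceStreamMarkerInfoINTEL streamMarkerInfo{};
  streamMarkerInfo.setMarker( drawCallId );
  cmd.setPerformanceStreamMarkerINTEL( streamMarkerInfo );   // throws vk::SystemError on failure
}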
 
-  struct DescriptorSetLayoutBinding
+  struct Viewport
   {
 
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {}) VULKAN_HPP_NOEXCEPT
-    : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( descriptorCount_ ), stageFlags( stageFlags_ ), pImmutableSamplers( pImmutableSamplers_ )
+    VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ ), width( width_ ), height( height_ ), minDepth( minDepth_ ), maxDepth( maxDepth_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorSetLayoutBinding( *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs ) )
-    {}
+    VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorSetLayoutBinding( uint32_t binding_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
-    : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) ), stageFlags( stageFlags_ ), pImmutableSamplers( immutableSamplers_.data() )
+    Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) )
     {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+    Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
       return *this;
     }
 
-    DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
     {
-      binding = binding_;
+      x = x_;
       return *this;
     }
 
-    DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
     {
-      descriptorType = descriptorType_;
+      y = y_;
       return *this;
     }
 
-    DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
     {
-      descriptorCount = descriptorCount_;
+      width = width_;
       return *this;
     }
 
-    DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
     {
-      stageFlags = stageFlags_;
+      height = height_;
       return *this;
     }
 
-    DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+    Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
     {
-      pImmutableSamplers = pImmutableSamplers_;
+      minDepth = minDepth_;
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorSetLayoutBinding & setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+    Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
     {
-      descriptorCount = static_cast<uint32_t>( immutableSamplers_.size() );
-      pImmutableSamplers = immutableSamplers_.data();
+      maxDepth = maxDepth_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkDescriptorSetLayoutBinding const&() const VULKAN_HPP_NOEXCEPT
+    operator VkViewport const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
+      return *reinterpret_cast<const VkViewport*>( this );
     }
 
-    operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
+    operator VkViewport &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
+      return *reinterpret_cast<VkViewport*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorSetLayoutBinding const& ) const = default;
+    auto operator<=>( Viewport const& ) const = default;
 #else
-    bool operator==( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( binding == rhs.binding )
-          && ( descriptorType == rhs.descriptorType )
-          && ( descriptorCount == rhs.descriptorCount )
-          && ( stageFlags == rhs.stageFlags )
-          && ( pImmutableSamplers == rhs.pImmutableSamplers );
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( minDepth == rhs.minDepth )
+          && ( maxDepth == rhs.maxDepth );
     }
 
-    bool operator!=( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -29550,98 +30177,90 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    uint32_t binding = {};
-    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
-    uint32_t descriptorCount = {};
-    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
-    const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers = {};
+    float x = {};
+    float y = {};
+    float width = {};
+    float height = {};
+    float minDepth = {};
+    float maxDepth = {};
 
   };
-  static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
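// Illustrative usage sketch: a full-framebuffer viewport built with the
// six-float constructor above and bound as dynamic state; assumes the bound
// pipeline declares VK_DYNAMIC_STATE_VIEWPORT.
#include <vulkan/vulkan.hpp>

void setFullViewport( vk::CommandBuffer cmd, vk::Extent2D extent )
{
  vk::Viewport viewport( 0.0f, 0.0f,
                         static_cast<float>( extent.width ),
                         static_cast<float>( extent.height ),
                         0.0f, 1.0f );               // minDepth .. maxDepth
  cmd.setViewport( 0, viewport );                    // ArrayProxy accepts a single element
}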
 
-  struct DescriptorSetLayoutBindingFlagsCreateInfo
+  struct ShadingRatePaletteNV
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
+
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {}) VULKAN_HPP_NOEXCEPT
-    : bindingCount( bindingCount_ ), pBindingFlags( pBindingFlags_ )
+    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT
+    : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ), pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
+    ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
     {}
 
 #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorSetLayoutBindingFlagsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ )
-    : bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
+    ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
+    : shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
     {}
 #endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
       return *this;
     }
 
-    DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+    ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      bindingCount = bindingCount_;
+      shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
       return *this;
     }
 
-    DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
+    ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
     {
-      pBindingFlags = pBindingFlags_;
+      pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
       return *this;
     }
 
 #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
+    ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
     {
-      bindingCount = static_cast<uint32_t>( bindingFlags_.size() );
-      pBindingFlags = bindingFlags_.data();
+      shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
+      pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
       return *this;
     }
 #endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkDescriptorSetLayoutBindingFlagsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
+      return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
     }
 
-    operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
+    operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
+      return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const& ) const = default;
+    auto operator<=>( ShadingRatePaletteNV const& ) const = default;
 #else
-    bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( bindingCount == rhs.bindingCount )
-          && ( pBindingFlags == rhs.pBindingFlags );
+      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
+          && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
     }
 
-    bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -29650,111 +30269,71 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
-    const void* pNext = {};
-    uint32_t bindingCount = {};
-    const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags = {};
+    uint32_t shadingRatePaletteEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
 
   };
-  static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorSetLayoutBindingFlagsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
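// Illustrative usage sketch, assuming VK_NV_shading_rate_image is enabled and the
// pipeline declares VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV: the
// ArrayProxy constructor above fills shadingRatePaletteEntryCount and
// pShadingRatePaletteEntries from a named container.
#include <vulkan/vulkan.hpp>
#include <array>

void setShadingRatePalette( vk::CommandBuffer cmd )
{
  std::array<vk::ShadingRatePaletteEntryNV, 2> entries{
    vk::ShadingRatePaletteEntryNV::eNoInvocations,
    vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel };
  vk::ShadingRatePaletteNV palette( entries );
  cmd.setViewportShadingRatePaletteNV( 0, palette );   // firstViewport = 0
}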
 
-  template <>
-  struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
+  struct ViewportWScalingNV
   {
-    using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
-  };
-  using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
 
-  struct DescriptorSetLayoutCreateInfo
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), bindingCount( bindingCount_ ), pBindings( pBindings_ )
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT
+    : xcoeff( xcoeff_ ), ycoeff( ycoeff_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorSetLayoutCreateInfo( *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs ) )
-    {}
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ )
-    : flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
+    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
     {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>( &rhs );
-      return *this;
-    }
-
-    DescriptorSetLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+    ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      bindingCount = bindingCount_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
       return *this;
     }
 
-    DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ ) VULKAN_HPP_NOEXCEPT
+    ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
     {
-      pBindings = pBindings_;
+      xcoeff = xcoeff_;
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorSetLayoutCreateInfo & setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
+    ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
     {
-      bindingCount = static_cast<uint32_t>( bindings_.size() );
-      pBindings = bindings_.data();
+      ycoeff = ycoeff_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkDescriptorSetLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
+      return *reinterpret_cast<const VkViewportWScalingNV*>( this );
     }
 
-    operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
+      return *reinterpret_cast<VkViewportWScalingNV*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorSetLayoutCreateInfo const& ) const = default;
+    auto operator<=>( ViewportWScalingNV const& ) const = default;
 #else
-    bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( bindingCount == rhs.bindingCount )
-          && ( pBindings == rhs.pBindings );
+      return ( xcoeff == rhs.xcoeff )
+          && ( ycoeff == rhs.ycoeff );
     }
 
-    bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -29763,70 +30342,78 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
-    uint32_t bindingCount = {};
-    const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings = {};
+    float xcoeff = {};
+    float ycoeff = {};
 
   };
-  static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
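// Illustrative usage sketch, assuming VK_NV_clip_space_w_scaling is enabled and
// the pipeline declares VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV: per-viewport
// w-scaling coefficients set as dynamic state.
#include <vulkan/vulkan.hpp>

void setWScaling( vk::CommandBuffer cmd )
{
  vk::ViewportWScalingNV wScaling( 1.0f, 1.0f );   // xcoeff, ycoeff
  cmd.setViewportWScalingNV( 0, wScaling );        // firstViewport = 0
}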
 
-  template <>
-  struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
+  struct StridedDeviceAddressRegionKHR
   {
-    using Type = DescriptorSetLayoutCreateInfo;
-  };
 
-  struct DescriptorSetLayoutSupport
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}) VULKAN_HPP_NOEXCEPT
-    : supported( supported_ )
+    VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceAddress( deviceAddress_ ), stride( stride_ ), size( size_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
+    StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs );
+      return *this;
+    }
+
+    StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
       return *this;
     }
 
+    StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
 
-    operator VkDescriptorSetLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
+    StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
+      size = size_;
+      return *this;
     }
 
-    operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
+
+    operator VkStridedDeviceAddressRegionKHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
+      return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR*>( this );
+    }
+
+    operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStridedDeviceAddressRegionKHR*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorSetLayoutSupport const& ) const = default;
+    auto operator<=>( StridedDeviceAddressRegionKHR const& ) const = default;
 #else
-    bool operator==( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( StridedDeviceAddressRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( supported == rhs.supported );
+      return ( deviceAddress == rhs.deviceAddress )
+          && ( stride == rhs.stride )
+          && ( size == rhs.size );
     }
 
-    bool operator!=( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( StridedDeviceAddressRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -29835,1321 +30422,1008 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 supported = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
 
   };
-  static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<StridedDeviceAddressRegionKHR>::value, "struct wrapper is not a standard layout!" );
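// Illustrative usage sketch, assuming VK_KHR_ray_tracing_pipeline is enabled,
// `sbt` is the device address of a shader-binding table laid out raygen|miss|hit,
// and `stride` is the aligned shader-group-handle stride: the three populated
// regions plus an empty callable region are handed to traceRaysKHR.
#include <vulkan/vulkan.hpp>

void traceFrame( vk::CommandBuffer cmd, vk::DeviceAddress sbt, vk::DeviceSize stride, vk::Extent2D extent )
{
  vk::StridedDeviceAddressRegionKHR raygen( sbt,              stride, stride );
  vk::StridedDeviceAddressRegionKHR miss  ( sbt + stride,     stride, stride );
  vk::StridedDeviceAddressRegionKHR hit   ( sbt + 2 * stride, stride, stride );
  vk::StridedDeviceAddressRegionKHR callable{};                       // no callable shaders bound
  cmd.traceRaysKHR( raygen, miss, hit, callable, extent.width, extent.height, 1 );
}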
 
-  template <>
-  struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
+  class CommandBuffer
   {
-    using Type = DescriptorSetLayoutSupport;
-  };
-  using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
+  public:
+    using CType = VkCommandBuffer;
 
-  struct DescriptorSetVariableDescriptorCountAllocateInfo
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(uint32_t descriptorSetCount_ = {}, const uint32_t* pDescriptorCounts_ = {}) VULKAN_HPP_NOEXCEPT
-    : descriptorSetCount( descriptorSetCount_ ), pDescriptorCounts( pDescriptorCounts_ )
+  public:
+    VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT
+      : m_commandBuffer(VK_NULL_HANDLE)
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
+    VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_commandBuffer(VK_NULL_HANDLE)
     {}
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ )
-    : descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
+    VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
+      : m_commandBuffer( commandBuffer )
     {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
+      m_commandBuffer = commandBuffer;
       return *this;
     }
+#endif
 
-    DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      m_commandBuffer = VK_NULL_HANDLE;
       return *this;
     }
 
-    DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBuffer const& ) const = default;
+#else
+    bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      descriptorSetCount = descriptorSetCount_;
-      return *this;
+      return m_commandBuffer == rhs.m_commandBuffer;
     }
 
-    DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+    bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      pDescriptorCounts = pDescriptorCounts_;
-      return *this;
+      return m_commandBuffer != rhs.m_commandBuffer;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+    bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
-      pDescriptorCounts = descriptorCounts_.data();
-      return *this;
+      return m_commandBuffer < rhs.m_commandBuffer;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif
 
 
-    operator VkDescriptorSetVariableDescriptorCountAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
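// Illustrative usage sketch: the enhanced-mode overload takes the wrapper struct
// by reference and throws vk::SystemError on failure, so a one-time-submit
// command buffer can be begun in a single call.
#include <vulkan/vulkan.hpp>

void beginOneTimeSubmit( vk::CommandBuffer cmd )
{
  vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
  cmd.begin( beginInfo );
}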
 
-    operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
-    }
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const& ) const = default;
-#else
-    bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( descriptorSetCount == rhs.descriptorSetCount )
-          && ( pDescriptorCounts == rhs.pDescriptorCounts );
-    }
 
-    bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
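// Illustrative usage sketch, assuming the instance enables VK_EXT_debug_utils:
// open a named label region that debugging tools display around the commands
// recorded until the matching endDebugUtilsLabelEXT.
#include <vulkan/vulkan.hpp>

void beginShadowPassLabel( vk::CommandBuffer cmd )
{
  vk::DebugUtilsLabelEXT label{};
  label.setPLabelName( "shadow pass" );     // the optional color is left at its default
  cmd.beginDebugUtilsLabelEXT( label );
}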
 
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
-    const void* pNext = {};
-    uint32_t descriptorSetCount = {};
-    const uint32_t* pDescriptorCounts = {};
 
-  };
-  static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-  template <>
-  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
-  {
-    using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
-  };
-  using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
 
-  struct DescriptorSetVariableDescriptorCountLayoutSupport
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(uint32_t maxVariableDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
-    : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
-    {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
-    {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
-    operator VkDescriptorSetVariableDescriptorCountLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
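// Illustrative usage sketch: the enhanced-mode overload accepts ArrayProxy, so a
// single descriptor set (and an empty dynamic-offset list) can be bound without
// building explicit count/pointer pairs.
#include <vulkan/vulkan.hpp>

void bindFrameDescriptors( vk::CommandBuffer cmd, vk::PipelineLayout layout, vk::DescriptorSet set )
{
  cmd.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, layout,
                          0 /*firstSet*/, set, nullptr /*dynamicOffsets*/ );
}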
 
-    operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
-    }
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const& ) const = default;
-#else
-    bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
-    }
 
-    bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
-    void* pNext = {};
-    uint32_t maxVariableDescriptorCount = {};
 
-  };
-  static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-  template <>
-  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
-  {
-    using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
-  };
-  using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
 
-  struct DescriptorUpdateTemplateEntry
-  {
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
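
A sketch, assuming VK_EXT_transform_feedback is enabled, of how the bind/begin/end trio declared in this class might be combined; the buffer and byte size are hypothetical.

    #include <vulkan/vulkan.hpp>

    void captureVertexOutput( vk::CommandBuffer cmd, vk::Buffer xfbBuffer, vk::DeviceSize byteSize )
    {
      vk::DeviceSize offset = 0;
      cmd.bindTransformFeedbackBuffersEXT( 0, xfbBuffer, offset, byteSize );  // binding 0
      cmd.beginTransformFeedbackEXT( 0, nullptr );                            // no counter buffers
      // ... draw calls whose vertex output is captured into xfbBuffer ...
      cmd.endTransformFeedbackEXT( 0, nullptr );
    }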
 
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {}) VULKAN_HPP_NOEXCEPT
-    : dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), offset( offset_ ), stride( stride_ )
-    {}
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
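
A small hedged sketch of the single-buffer case for the ArrayProxy overload above, combined with the bindIndexBuffer and drawIndexed members declared elsewhere in this class; all handles and counts are hypothetical.

    #include <vulkan/vulkan.hpp>

    void drawMesh( vk::CommandBuffer cmd, vk::Buffer vertexBuffer, vk::Buffer indexBuffer, uint32_t indexCount )
    {
      vk::DeviceSize offset = 0;
      cmd.bindVertexBuffers( 0, vertexBuffer, offset );              // one buffer at binding 0
      cmd.bindIndexBuffer( indexBuffer, 0, vk::IndexType::eUint32 );
      cmd.drawIndexed( indexCount, 1, 0, 0, 0 );                     // one instance
    }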
 
-    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorUpdateTemplateEntry( *reinterpret_cast<DescriptorUpdateTemplateEntry const *>( &rhs ) )
-    {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>( &rhs );
-      return *this;
-    }
-
-    DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstBinding = dstBinding_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstArrayElement = dstArrayElement_;
-      return *this;
-    }
 
-    DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorCount = descriptorCount_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorType = descriptorType_;
-      return *this;
-    }
 
-    DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
-    {
-      offset = offset_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
-    {
-      stride = stride_;
-      return *this;
-    }
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresIndirectKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::DeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const * ppMaxPrimitiveCounts, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresIndirectKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, ArrayProxy<const uint32_t> const & indirectStrides, ArrayProxy<const uint32_t* const > const & pMaxPrimitiveCounts, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    operator VkDescriptorUpdateTemplateEntry const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
-    }
 
-    operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const * ppBuildRangeInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const > const & pBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorUpdateTemplateEntry const& ) const = default;
-#else
-    bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( dstBinding == rhs.dstBinding )
-          && ( dstArrayElement == rhs.dstArrayElement )
-          && ( descriptorCount == rhs.descriptorCount )
-          && ( descriptorType == rhs.descriptorType )
-          && ( offset == rhs.offset )
-          && ( stride == rhs.stride );
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
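
A hedged sketch of clearing the first mip level of a color image via the enhanced overload above; the image is assumed (hypothetically) to already be in eTransferDstOptimal layout.

    #include <vulkan/vulkan.hpp>
    #include <array>

    void clearToOpaqueBlack( vk::CommandBuffer cmd, vk::Image image )
    {
      vk::ClearColorValue black( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } );
      vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
      cmd.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal, black, range );
    }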
 
 
-  public:
-    uint32_t dstBinding = {};
-    uint32_t dstArrayElement = {};
-    uint32_t descriptorCount = {};
-    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
-    size_t offset = {};
-    size_t stride = {};
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  };
-  static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
-  using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
 
-  struct DescriptorUpdateTemplateCreateInfo
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {}, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ), pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
-    {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast<DescriptorUpdateTemplateCreateInfo const *>( &rhs ) )
-    {}
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {} )
-    : flags( flags_ ), descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) ), pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
-    {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>( &rhs );
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DescriptorUpdateTemplateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
 
-    DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
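
A sketch of the VK_KHR_copy_commands2 path declared above (extension assumed enabled); the classic copyBuffer overloads remain available alongside it. The buffer handles and size are hypothetical.

    #include <vulkan/vulkan.hpp>

    void copyWholeBuffer2( vk::CommandBuffer cmd, vk::Buffer src, vk::Buffer dst, vk::DeviceSize size )
    {
      vk::BufferCopy2KHR region( 0, 0, size );             // srcOffset, dstOffset, size
      vk::CopyBufferInfo2KHR info( src, dst, 1, &region );
      cmd.copyBuffer2KHR( info );
    }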
 
-    DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
-      return *this;
-    }
 
-    DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
-      pDescriptorUpdateEntries = descriptorUpdateEntries_.data();
-      return *this;
-    }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
-    {
-      templateType = templateType_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorSetLayout = descriptorSetLayout_;
-      return *this;
-    }
 
-    DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pipelineBindPoint = pipelineBindPoint_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pipelineLayout = pipelineLayout_;
-      return *this;
-    }
 
-    DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
-    {
-      set = set_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
-    operator VkDescriptorUpdateTemplateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
-    }
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorUpdateTemplateCreateInfo const& ) const = default;
-#else
-    bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
-          && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
-          && ( templateType == rhs.templateType )
-          && ( descriptorSetLayout == rhs.descriptorSetLayout )
-          && ( pipelineBindPoint == rhs.pipelineBindPoint )
-          && ( pipelineLayout == rhs.pipelineLayout )
-          && ( set == rhs.set );
-    }
 
-    bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
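
A hedged sketch of copying two 64-bit query results into a buffer with the declaration above; the pool and destination buffer are hypothetical, and eWait makes the copy wait for availability.

    #include <vulkan/vulkan.hpp>

    void readBackTimestamps( vk::CommandBuffer cmd, vk::QueryPool pool, vk::Buffer dst )
    {
      cmd.copyQueryPoolResults( pool, 0, 2, dst, 0, sizeof( uint64_t ),
                                vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
    }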
 
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {};
-    uint32_t descriptorUpdateEntryCount = {};
-    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries = {};
-    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
-    VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
-    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
-    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
-    uint32_t set = {};
 
-  };
-  static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template <>
-  struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
-  {
-    using Type = DescriptorUpdateTemplateCreateInfo;
-  };
-  using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
 
-  struct DeviceQueueCreateInfo
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerEndEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueCount_ = {}, const float* pQueuePriorities_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( queueCount_ ), pQueuePriorities( pQueuePriorities_ )
-    {}
 
-    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
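
A sketch of wrapping a stretch of commands in VK_EXT_debug_marker labels using the overloads above (extension assumed enabled); the label text is hypothetical.

    #include <vulkan/vulkan.hpp>

    void labelRegion( vk::CommandBuffer cmd, const char * label )
    {
      vk::DebugMarkerMarkerInfoEXT marker;
      marker.setPMarkerName( label );          // the optional color is left at its default
      cmd.debugMarkerBeginEXT( marker );
      // ... commands belonging to the labelled region ...
      cmd.debugMarkerEndEXT();
    }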
 
-    DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DeviceQueueCreateInfo( *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs ) )
-    {}
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ )
-    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( static_cast<uint32_t>( queuePriorities_.size() ) ), pQueuePriorities( queuePriorities_.data() )
-    {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
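
A sketch of sizing a compute dispatch so that groupCountX covers elementCount work items with a hypothetical local workgroup size of 64; the pipeline and descriptor sets are assumed to be bound already.

    #include <vulkan/vulkan.hpp>

    void dispatchOverElements( vk::CommandBuffer cmd, uint32_t elementCount )
    {
      uint32_t groupCountX = ( elementCount + 63 ) / 64;  // round up to whole 64-wide workgroups
      cmd.dispatch( groupCountX, 1, 1 );
    }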
 
-    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>( &rhs );
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    DeviceQueueCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
 
-    DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
-    {
-      queueFamilyIndex = queueFamilyIndex_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      queueCount = queueCount_;
-      return *this;
-    }
 
-    DeviceQueueCreateInfo & setPQueuePriorities( const float* pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pQueuePriorities = pQueuePriorities_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
-    {
-      queueCount = static_cast<uint32_t>( queuePriorities_.size() );
-      pQueuePriorities = queuePriorities_.data();
-      return *this;
-    }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
-    }
 
-    operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DeviceQueueCreateInfo const& ) const = default;
-#else
-    bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( queueFamilyIndex == rhs.queueFamilyIndex )
-          && ( queueCount == rhs.queueCount )
-          && ( pQueuePriorities == rhs.pQueuePriorities );
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    bool operator!=( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
 
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
-    uint32_t queueFamilyIndex = {};
-    uint32_t queueCount = {};
-    const float* pQueuePriorities = {};
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-  };
-  static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
-  template <>
-  struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
-  {
-    using Type = DeviceQueueCreateInfo;
-  };
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-  struct PhysicalDeviceFeatures
-  {
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
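
A hedged sketch of a GPU-driven draw where the draw count itself lives in a buffer, using the core drawIndirectCount declaration above; the argument buffer, count buffer, and maximum draw count are hypothetical.

    #include <vulkan/vulkan.hpp>

    void drawFromGpuArgs( vk::CommandBuffer cmd, vk::Buffer argsBuffer, vk::Buffer countBuffer, uint32_t maxDraws )
    {
      // The stride matches tightly packed VkDrawIndirectCommand records in argsBuffer.
      cmd.drawIndirectCount( argsBuffer, 0, countBuffer, 0, maxDraws,
                             static_cast<uint32_t>( sizeof( vk::DrawIndirectCommand ) ) );
    }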
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {}) VULKAN_HPP_NOEXCEPT
-    : robustBufferAccess( robustBufferAccess_ ), fullDrawIndexUint32( fullDrawIndexUint32_ ), imageCubeArray( imageCubeArray_ ), independentBlend( independentBlend_ ), geometryShader( geometryShader_ ), tessellationShader( tessellationShader_ ), sampleRateShading( sampleRateShading_ ), dualSrcBlend( dualSrcBlend_ ), logicOp( logicOp_ ), multiDrawIndirect( multiDrawIndirect_ ), drawIndirectFirstInstance( drawIndirectFirstInstance_ ), depthClamp( depthClamp_ ), depthBiasClamp( depthBiasClamp_ ), fillModeNonSolid( fillModeNonSolid_ ), depthBounds( depthBounds_ ), wideLines( wideLines_ ), largePoints( largePoints_ ), alphaToOne( alphaToOne_ ), multiViewport( multiViewport_ ), samplerAnisotropy( samplerAnisotropy_ ), textureCompressionETC2( textureCompressionETC2_ ), textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ), textureCompressionBC( textureCompressionBC_ ), occlusionQueryPrecise( occlusionQueryPrecise_ ), pipelineStatisticsQuery( pipelineStatisticsQuery_ ), vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ), fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ), shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ), shaderImageGatherExtended( shaderImageGatherExtended_ ), shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ), shaderStorageImageMultisample( shaderStorageImageMultisample_ ), shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ), shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ), shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ), shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ), shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ), shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ), shaderClipDistance( shaderClipDistance_ ), shaderCullDistance( shaderCullDistance_ ), shaderFloat64( shaderFloat64_ ), shaderInt64( shaderInt64_ ), shaderInt16( shaderInt16_ ), shaderResourceResidency( shaderResourceResidency_ ), shaderResourceMinLod( shaderResourceMinLod_ ), sparseBinding( sparseBinding_ ), sparseResidencyBuffer( sparseResidencyBuffer_ ), sparseResidencyImage2D( sparseResidencyImage2D_ ), sparseResidencyImage3D( sparseResidencyImage3D_ ), sparseResidency2Samples( sparseResidency2Samples_ ), sparseResidency4Samples( sparseResidency4Samples_ ), sparseResidency8Samples( sparseResidency8Samples_ ), sparseResidency16Samples( sparseResidency16Samples_ ), sparseResidencyAliased( sparseResidencyAliased_ ), variableMultisampleRate( variableMultisampleRate_ ), inheritedQueries( inheritedQueries_ )
-    {}
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PhysicalDeviceFeatures( *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs ) )
-    {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>( &rhs );
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
-    {
-      robustBufferAccess = robustBufferAccess_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
-    {
-      fullDrawIndexUint32 = fullDrawIndexUint32_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
-    {
-      imageCubeArray = imageCubeArray_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
-    {
-      independentBlend = independentBlend_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
-    {
-      geometryShader = geometryShader_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endConditionalRenderingEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
-    {
-      tessellationShader = tessellationShader_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sampleRateShading = sampleRateShading_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dualSrcBlend = dualSrcBlend_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
-    {
-      logicOp = logicOp_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
-    {
-      multiDrawIndirect = multiDrawIndirect_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
-    {
-      drawIndirectFirstInstance = drawIndirectFirstInstance_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
-    {
-      depthClamp = depthClamp_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
-    {
-      depthBiasClamp = depthBiasClamp_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
-    {
-      fillModeNonSolid = fillModeNonSolid_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
-    {
-      depthBounds = depthBounds_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
-    {
-      wideLines = wideLines_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
-    {
-      largePoints = largePoints_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
-    {
-      alphaToOne = alphaToOne_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
-    {
-      multiViewport = multiViewport_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
-    {
-      samplerAnisotropy = samplerAnisotropy_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
-    {
-      textureCompressionETC2 = textureCompressionETC2_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
-    {
-      textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
-    {
-      textureCompressionBC = textureCompressionBC_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
-    {
-      occlusionQueryPrecise = occlusionQueryPrecise_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pipelineStatisticsQuery = pipelineStatisticsQuery_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
-    {
-      vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
-    {
-      fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderImageGatherExtended = shaderImageGatherExtended_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderStorageImageMultisample = shaderStorageImageMultisample_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderClipDistance = shaderClipDistance_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderCullDistance = shaderCullDistance_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> const & values, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderFloat64 = shaderFloat64_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderInt64 = shaderInt64_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderInt16 = shaderInt16_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderResourceResidency = shaderResourceResidency_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderResourceMinLod = shaderResourceMinLod_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sparseBinding = sparseBinding_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sparseResidencyBuffer = sparseResidencyBuffer_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sparseResidencyImage2D = sparseResidencyImage2D_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sparseResidencyImage3D = sparseResidencyImage3D_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sparseResidency2Samples = sparseResidency2Samples_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sparseResidency4Samples = sparseResidency4Samples_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sparseResidency8Samples = sparseResidency8Samples_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sparseResidency16Samples = sparseResidency16Samples_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sparseResidencyAliased = sparseResidencyAliased_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
-    {
-      variableMultisampleRate = variableMultisampleRate_;
-      return *this;
-    }
 
-    PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
-    {
-      inheritedQueries = inheritedQueries_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setBlendConstants( const float blendConstants[4], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
 
-    operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCheckpointNV( const void* pCheckpointMarker, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
-    }
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceFeatures const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( robustBufferAccess == rhs.robustBufferAccess )
-          && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
-          && ( imageCubeArray == rhs.imageCubeArray )
-          && ( independentBlend == rhs.independentBlend )
-          && ( geometryShader == rhs.geometryShader )
-          && ( tessellationShader == rhs.tessellationShader )
-          && ( sampleRateShading == rhs.sampleRateShading )
-          && ( dualSrcBlend == rhs.dualSrcBlend )
-          && ( logicOp == rhs.logicOp )
-          && ( multiDrawIndirect == rhs.multiDrawIndirect )
-          && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
-          && ( depthClamp == rhs.depthClamp )
-          && ( depthBiasClamp == rhs.depthBiasClamp )
-          && ( fillModeNonSolid == rhs.fillModeNonSolid )
-          && ( depthBounds == rhs.depthBounds )
-          && ( wideLines == rhs.wideLines )
-          && ( largePoints == rhs.largePoints )
-          && ( alphaToOne == rhs.alphaToOne )
-          && ( multiViewport == rhs.multiViewport )
-          && ( samplerAnisotropy == rhs.samplerAnisotropy )
-          && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
-          && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
-          && ( textureCompressionBC == rhs.textureCompressionBC )
-          && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
-          && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
-          && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
-          && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
-          && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
-          && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
-          && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
-          && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
-          && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
-          && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
-          && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
-          && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
-          && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
-          && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
-          && ( shaderClipDistance == rhs.shaderClipDistance )
-          && ( shaderCullDistance == rhs.shaderCullDistance )
-          && ( shaderFloat64 == rhs.shaderFloat64 )
-          && ( shaderInt64 == rhs.shaderInt64 )
-          && ( shaderInt16 == rhs.shaderInt16 )
-          && ( shaderResourceResidency == rhs.shaderResourceResidency )
-          && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
-          && ( sparseBinding == rhs.sparseBinding )
-          && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
-          && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
-          && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
-          && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
-          && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
-          && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
-          && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
-          && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
-          && ( variableMultisampleRate == rhs.variableMultisampleRate )
-          && ( inheritedQueries == rhs.inheritedQueries );
-    }
 
-    bool operator!=( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-  public:
-    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
-    VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {};
-    VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {};
-    VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {};
-    VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {};
-    VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {};
-    VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {};
-    VULKAN_HPP_NAMESPACE::Bool32 logicOp = {};
-    VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {};
-    VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {};
-    VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {};
-    VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {};
-    VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {};
-    VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {};
-    VULKAN_HPP_NAMESPACE::Bool32 wideLines = {};
-    VULKAN_HPP_NAMESPACE::Bool32 largePoints = {};
-    VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {};
-    VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {};
-    VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {};
-    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {};
-    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {};
-    VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {};
-    VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {};
-    VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {};
-    VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
-    VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
-    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
-    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
-    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
-    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
-    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
-    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
-    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
-    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
-    VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
-    VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
 
-  };
-  static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-  struct DeviceCreateInfo
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char* const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char* const * ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), queueCreateInfoCount( queueCreateInfoCount_ ), pQueueCreateInfos( pQueueCreateInfos_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ ), pEnabledFeatures( pEnabledFeatures_ )
-    {}
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) )
-    {}
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} )
-    : flags( flags_ ), queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) ), pQueueCreateInfos( queueCreateInfos_.data() ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() ), pEnabledFeatures( pEnabledFeatures_ )
-    {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs );
-      return *this;
-    }
 
-    DeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
 
-    DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      queueCreateInfoCount = queueCreateInfoCount_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDeviceMask( uint32_t deviceMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pQueueCreateInfos = pQueueCreateInfos_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DeviceCreateInfo & setQueueCreateInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
-    {
-      queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
-      pQueueCreateInfos = queueCreateInfos_.data();
-      return *this;
-    }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      enabledLayerCount = enabledLayerCount_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    DeviceCreateInfo & setPpEnabledLayerNames( const char* const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
-    {
-      ppEnabledLayerNames = ppEnabledLayerNames_;
-      return *this;
-    }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
-    {
-      enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
-      ppEnabledLayerNames = pEnabledLayerNames_.data();
-      return *this;
-    }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      enabledExtensionCount = enabledExtensionCount_;
-      return *this;
-    }
 
-    DeviceCreateInfo & setPpEnabledExtensionNames( const char* const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
-    {
-      ppEnabledExtensionNames = ppEnabledExtensionNames_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const DependencyInfoKHR & dependencyInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    DeviceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
-    {
-      enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
-      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
-      return *this;
-    }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pEnabledFeatures = pEnabledFeatures_;
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
-    operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDeviceCreateInfo*>( this );
-    }
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D* pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateKHR( const Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DeviceCreateInfo const& ) const = default;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineWidth( float lineWidth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents2KHR( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
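// Illustrative usage sketch for the new VK_KHR_synchronization2 / VK_AMD_buffer_marker
// overloads declared above (assumptions: the default "vk" namespace, a command buffer
// `cmd` in the recording state, a query pool `timestampPool`, a buffer `markerBuffer`,
// and vk::PipelineStageFlagBits2KHR::eAllCommands as an available stage bit):
//
//   cmd.writeTimestamp2KHR( vk::PipelineStageFlagBits2KHR::eAllCommands, timestampPool, 0 );
//   cmd.writeBufferMarker2AMD( vk::PipelineStageFlagBits2KHR::eAllCommands, markerBuffer, 0, 0xCAFEu );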
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result end( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #else
-    bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
-          && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
-          && ( enabledLayerCount == rhs.enabledLayerCount )
-          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
-          && ( enabledExtensionCount == rhs.enabledExtensionCount )
-          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
-          && ( pEnabledFeatures == rhs.pEnabledFeatures );
+      return m_commandBuffer;
     }
 
-    bool operator!=( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return m_commandBuffer != VK_NULL_HANDLE;
     }
-#endif
 
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer == VK_NULL_HANDLE;
+    }
 
+  private:
+    VkCommandBuffer m_commandBuffer;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
 
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
-    uint32_t queueCreateInfoCount = {};
-    const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {};
-    uint32_t enabledLayerCount = {};
-    const char* const * ppEnabledLayerNames = {};
-    uint32_t enabledExtensionCount = {};
-    const char* const * ppEnabledExtensionNames = {};
-    const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {};
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eCommandBuffer>
+  {
+    using type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+  };
 
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
   };
-  static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
 
   template <>
-  struct CppType<StructureType, StructureType::eDeviceCreateInfo>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
   {
-    using Type = DeviceCreateInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
 
-  struct DeviceDeviceMemoryReportCreateInfoEXT
+  struct CommandBufferSubmitInfoKHR
   {
-    static const bool allowDuplicate = true;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfoKHR;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
+    VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfoKHR(VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : commandBuffer( commandBuffer_ ), deviceMask( deviceMask_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfoKHR( CommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) )
+    CommandBufferSubmitInfoKHR( VkCommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferSubmitInfoKHR( *reinterpret_cast<CommandBufferSubmitInfoKHR const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfoKHR & operator=( CommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    CommandBufferSubmitInfoKHR & operator=( VkCommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR const *>( &rhs );
       return *this;
     }
 
-    DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    CommandBufferSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+    CommandBufferSubmitInfoKHR & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
     {
-      pfnUserCallback = pfnUserCallback_;
+      commandBuffer = commandBuffer_;
       return *this;
     }
 
-    DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    CommandBufferSubmitInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
     {
-      pUserData = pUserData_;
+      deviceMask = deviceMask_;
       return *this;
     }
 
 
-    operator VkDeviceDeviceMemoryReportCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkCommandBufferSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
+      return *reinterpret_cast<const VkCommandBufferSubmitInfoKHR*>( this );
     }
 
-    operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkCommandBufferSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
+      return *reinterpret_cast<VkCommandBufferSubmitInfoKHR*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DeviceDeviceMemoryReportCreateInfoEXT const& ) const = default;
+    auto operator<=>( CommandBufferSubmitInfoKHR const& ) const = default;
 #else
-    bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( CommandBufferSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pfnUserCallback == rhs.pfnUserCallback )
-          && ( pUserData == rhs.pUserData );
+          && ( commandBuffer == rhs.commandBuffer )
+          && ( deviceMask == rhs.deviceMask );
     }
 
-    bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( CommandBufferSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -31158,82 +31432,88 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfoKHR;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
-    PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {};
-    void* pUserData = {};
+    VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {};
+    uint32_t deviceMask = {};
 
   };
-  static_assert( sizeof( DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DeviceDeviceMemoryReportCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( CommandBufferSubmitInfoKHR ) == sizeof( VkCommandBufferSubmitInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT>
+  struct CppType<StructureType, StructureType::eCommandBufferSubmitInfoKHR>
   {
-    using Type = DeviceDeviceMemoryReportCreateInfoEXT;
+    using Type = CommandBufferSubmitInfoKHR;
   };
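// Illustrative usage sketch for the CommandBufferSubmitInfoKHR wrapper above (assumptions:
// the default "vk" namespace and a fully recorded vk::CommandBuffer `cmd`); a deviceMask of 0
// submits the command buffer to all devices in the device group:
//
//   vk::CommandBufferSubmitInfoKHR cmdInfo;
//   cmdInfo.setCommandBuffer( cmd )
//          .setDeviceMask( 0 );
//   // equivalently: vk::CommandBufferSubmitInfoKHR cmdInfo( cmd, 0 );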
 
-  struct DeviceDiagnosticsConfigCreateInfoNV
+  struct CommandPoolCreateInfo
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}) VULKAN_HPP_NOEXCEPT
-    : flags( flags_ )
+    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ) )
+    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
       return *this;
     }
 
-    DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    CommandPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
+    CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
 
-    operator VkDeviceDiagnosticsConfigCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
+      return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
     }
 
-    operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
+      return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const& ) const = default;
+    auto operator<=>( CommandPoolCreateInfo const& ) const = default;
 #else
-    bool operator==( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags );
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex );
     }
 
-    bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -31242,169 +31522,186 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
 
   };
-  static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
+  struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
   {
-    using Type = DeviceDiagnosticsConfigCreateInfoNV;
+    using Type = CommandPoolCreateInfo;
   };
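// Illustrative usage sketch for CommandPoolCreateInfo above (assumptions: the default "vk"
// namespace, a vk::Device `device`, a queue family index `graphicsQueueFamily`, and the
// enhanced-mode vk::Device::createCommandPool declared elsewhere in this header):
//
//   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
//                                       graphicsQueueFamily );
//   vk::CommandPool commandPool = device.createCommandPool( poolInfo );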
 
-  struct DeviceEventInfoEXT
+  class ShaderModule
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
+  public:
+    using CType = VkShaderModule;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug) VULKAN_HPP_NOEXCEPT
-    : deviceEvent( deviceEvent_ )
-    {}
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
 
-    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+  public:
+    VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
+      : m_shaderModule(VK_NULL_HANDLE)
+    {}
 
-    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) )
+    VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_shaderModule(VK_NULL_HANDLE)
     {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
+      : m_shaderModule( shaderModule )
+    {}
 
-    DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
+      m_shaderModule = shaderModule;
       return *this;
     }
+#endif
 
-    DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      m_shaderModule = VK_NULL_HANDLE;
       return *this;
     }
 
-    DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderModule const& ) const = default;
+#else
+    bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      deviceEvent = deviceEvent_;
-      return *this;
+      return m_shaderModule == rhs.m_shaderModule;
     }
 
-
-    operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
+      return m_shaderModule != rhs.m_shaderModule;
     }
 
-    operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
+      return m_shaderModule < rhs.m_shaderModule;
     }
+#endif
 
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule;
+    }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DeviceEventInfoEXT const& ) const = default;
-#else
-    bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( deviceEvent == rhs.deviceEvent );
+      return m_shaderModule != VK_NULL_HANDLE;
     }
 
-    bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return m_shaderModule == VK_NULL_HANDLE;
     }
-#endif
 
+  private:
+    VkShaderModule m_shaderModule;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
 
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eShaderModule>
+  {
+    using type = VULKAN_HPP_NAMESPACE::ShaderModule;
+  };
 
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+  };
 
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
   };
-  static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
 
   template <>
-  struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderModule>
   {
-    using Type = DeviceEventInfoEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
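// Illustrative usage sketch for the ShaderModule handle wrapper above (assumption: a
// vk::ShaderModule `shader` obtained from shader-module creation elsewhere). The wrapper
// is a non-owning value type: comparing or clearing it never destroys the module.
//
//   if ( shader )        // explicit operator bool: true when the handle is not VK_NULL_HANDLE
//   {
//     // ... reference `shader` from a vk::PipelineShaderStageCreateInfo ...
//   }
//   shader = nullptr;    // forgets the handle value only; vkDestroyShaderModule is not called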
 
-  struct DeviceGroupBindSparseInfo
+  struct SpecializationMapEntry
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
+
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}) VULKAN_HPP_NOEXCEPT
-    : resourceDeviceIndex( resourceDeviceIndex_ ), memoryDeviceIndex( memoryDeviceIndex_ )
+    VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT
+    : constantID( constantID_ ), offset( offset_ ), size( size_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
+    SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
       return *this;
     }
 
-    DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      constantID = constantID_;
       return *this;
     }
 
-    DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
     {
-      resourceDeviceIndex = resourceDeviceIndex_;
+      offset = offset_;
       return *this;
     }
 
-    DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
     {
-      memoryDeviceIndex = memoryDeviceIndex_;
+      size = size_;
       return *this;
     }
 
 
-    operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
+      return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
     }
 
-    operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+    operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
+      return *reinterpret_cast<VkSpecializationMapEntry*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DeviceGroupBindSparseInfo const& ) const = default;
+    auto operator<=>( SpecializationMapEntry const& ) const = default;
 #else
-    bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
-          && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
+      return ( constantID == rhs.constantID )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
     }
 
-    bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -31413,82 +31710,112 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
-    const void* pNext = {};
-    uint32_t resourceDeviceIndex = {};
-    uint32_t memoryDeviceIndex = {};
+    uint32_t constantID = {};
+    uint32_t offset = {};
+    size_t size = {};
 
   };
-  static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
 
-  template <>
-  struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
+  struct SpecializationInfo
   {
-    using Type = DeviceGroupBindSparseInfo;
-  };
-  using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
 
-  struct DeviceGroupCommandBufferBeginInfo
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo(uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
-    : deviceMask( deviceMask_ )
+    VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, size_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT
+    : mapEntryCount( mapEntryCount_ ), pMapEntries( pMapEntries_ ), dataSize( dataSize_ ), pData( pData_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
+    SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) )
+    {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
+    : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
     {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
       return *this;
     }
 
-    DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      mapEntryCount = mapEntryCount_;
       return *this;
     }
 
-    DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT
     {
-      deviceMask = deviceMask_;
+      pMapEntries = pMapEntries_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
+      pMapEntries = mapEntries_.data();
       return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
+    SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
 
-    operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    SpecializationInfo & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
+      pData = pData_;
+      return *this;
     }
 
-    operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
+      dataSize = data_.size() * sizeof(T);
+      pData = data_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSpecializationInfo*>( this );
+    }
+
+    operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSpecializationInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DeviceGroupCommandBufferBeginInfo const& ) const = default;
+    auto operator<=>( SpecializationInfo const& ) const = default;
 #else
-    bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( deviceMask == rhs.deviceMask );
+      return ( mapEntryCount == rhs.mapEntryCount )
+          && ( pMapEntries == rhs.pMapEntries )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
     }
 
-    bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -31497,181 +31824,217 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
-    const void* pNext = {};
-    uint32_t deviceMask = {};
+    uint32_t mapEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {};
+    size_t dataSize = {};
+    const void* pData = {};
 
   };
-  static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
 
-  template <>
-  struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
+  struct PipelineShaderStageCreateInfo
   {
-    using Type = DeviceGroupCommandBufferBeginInfo;
-  };
-  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
 
-  class DisplayKHR
-  {
-  public:
-    using CType = VkDisplayKHR;
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char* pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), stage( stage_ ), module( module_ ), pName( pName_ ), pSpecializationInfo( pSpecializationInfo_ )
+    {}
 
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-  public:
-    VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT
-      : m_displayKHR(VK_NULL_HANDLE)
+    PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
     {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_displayKHR(VK_NULL_HANDLE)
-    {}
+    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
-      : m_displayKHR( displayKHR )
-    {}
+    PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
+      return *this;
+    }
 
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT
+    PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      m_displayKHR = displayKHR;
+      pNext = pNext_;
       return *this;
     }
-#endif
 
-    DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      m_displayKHR = VK_NULL_HANDLE;
+      flags = flags_;
       return *this;
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DisplayKHR const& ) const = default;
-#else
-    bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_displayKHR == rhs.m_displayKHR;
+      stage = stage_;
+      return *this;
     }
 
-    bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_displayKHR != rhs.m_displayKHR;
+      module = module_;
+      return *this;
     }
 
-    bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    PipelineShaderStageCreateInfo & setPName( const char* pName_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_displayKHR < rhs.m_displayKHR;
+      pName = pName_;
+      return *this;
     }
-#endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT
+    PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_displayKHR;
+      pSpecializationInfo = pSpecializationInfo_;
+      return *this;
     }
 
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+
+    operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return m_displayKHR != VK_NULL_HANDLE;
+      return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
     }
 
-    bool operator!() const VULKAN_HPP_NOEXCEPT
+    operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return m_displayKHR == VK_NULL_HANDLE;
+      return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
     }
 
-  private:
-    VkDisplayKHR m_displayKHR;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
 
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDisplayKHR>
-  {
-    using type = VULKAN_HPP_NAMESPACE::DisplayKHR;
-  };
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( module == rhs.module )
+          && ( pName == rhs.pName )
+          && ( pSpecializationInfo == rhs.pSpecializationInfo );
+    }
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
-  };
+    bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
-  };
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
+    VULKAN_HPP_NAMESPACE::ShaderModule module = {};
+    const char* pName = {};
+    const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {};
+
+  };
+  static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayKHR>
+  struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
   {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+    using Type = PipelineShaderStageCreateInfo;
   };
 
-  struct PerformanceConfigurationAcquireInfoINTEL
+  struct ComputePipelineCreateInfo
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated) VULKAN_HPP_NOEXCEPT
-    : type( type_ )
+    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), stage( stage_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) )
+    ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
       return *this;
     }
 
-    PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ComputePipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      type = type_;
+      flags = flags_;
       return *this;
     }
 
+    ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stage = stage_;
+      return *this;
+    }
 
-    operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+      layout = layout_;
+      return *this;
     }
 
-    operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+
+    operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
+    }
+
+    operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const& ) const = default;
+    auto operator<=>( ComputePipelineCreateInfo const& ) const = default;
 #else
-    bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( type == rhs.type );
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
     }
 
-    bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -31680,317 +32043,223 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
 
   };
-  static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
+  struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
   {
-    using Type = PerformanceConfigurationAcquireInfoINTEL;
+    using Type = ComputePipelineCreateInfo;
   };
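// Illustrative usage sketch tying together SpecializationMapEntry, SpecializationInfo,
// PipelineShaderStageCreateInfo and ComputePipelineCreateInfo above (assumptions: the
// default "vk" namespace, a vk::ShaderModule `computeShader` whose entry point is "main",
// a vk::PipelineLayout `layout`, and a uint32_t specialization constant with constantID 0;
// all names and values are illustrative):
//
//   uint32_t workgroupSize = 64;
//   vk::SpecializationMapEntry entry( 0 /*constantID*/, 0 /*offset*/, sizeof( uint32_t ) );
//   vk::SpecializationInfo     specInfo( 1, &entry, sizeof( workgroupSize ), &workgroupSize );
//   vk::PipelineShaderStageCreateInfo stage( {}, vk::ShaderStageFlagBits::eCompute,
//                                            computeShader, "main", &specInfo );
//   vk::ComputePipelineCreateInfo pipelineInfo( {}, stage, layout );
//   // The pipeline itself is then created through vk::Device::createComputePipeline*,
//   // declared elsewhere in this header.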
 
-  class PerformanceConfigurationINTEL
+  struct ConformanceVersion
   {
-  public:
-    using CType = VkPerformanceConfigurationINTEL;
 
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
 
-  public:
-    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT
-      : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT
+    : major( major_ ), minor( minor_ ), subminor( subminor_ ), patch( patch_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
-    {}
+    VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
-      : m_performanceConfigurationINTEL( performanceConfigurationINTEL )
+    ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) )
     {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      m_performanceConfigurationINTEL = performanceConfigurationINTEL;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
       return *this;
     }
-#endif
 
-    PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
     {
-      m_performanceConfigurationINTEL = VK_NULL_HANDLE;
+      major = major_;
       return *this;
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PerformanceConfigurationINTEL const& ) const = default;
-#else
-    bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL;
-    }
-
-    bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL;
+      minor = minor_;
+      return *this;
     }
 
-    bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL;
+      subminor = subminor_;
+      return *this;
     }
-#endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
+    ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_performanceConfigurationINTEL;
+      patch = patch_;
+      return *this;
     }
 
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_performanceConfigurationINTEL != VK_NULL_HANDLE;
-    }
 
-    bool operator!() const VULKAN_HPP_NOEXCEPT
+    operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT
     {
-      return m_performanceConfigurationINTEL == VK_NULL_HANDLE;
+      return *reinterpret_cast<const VkConformanceVersion*>( this );
     }
 
-  private:
-    VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" );
-
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePerformanceConfigurationINTEL>
-  {
-    using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
-  };
-
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
-  };
-
-
-
-  template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
-  {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
-  };
-
-  class QueryPool
-  {
-  public:
-    using CType = VkQueryPool;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
-
-  public:
-    VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT
-      : m_queryPool(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_queryPool(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
-      : m_queryPool( queryPool )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT
+    operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
     {
-      m_queryPool = queryPool;
-      return *this;
+      return *reinterpret_cast<VkConformanceVersion*>( this );
     }
-#endif
 
-    QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-    {
-      m_queryPool = VK_NULL_HANDLE;
-      return *this;
-    }
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( QueryPool const& ) const = default;
+    auto operator<=>( ConformanceVersion const& ) const = default;
 #else
-    bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_queryPool == rhs.m_queryPool;
-    }
-
-    bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_queryPool != rhs.m_queryPool;
+      return ( major == rhs.major )
+          && ( minor == rhs.minor )
+          && ( subminor == rhs.subminor )
+          && ( patch == rhs.patch );
     }
 
-    bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_queryPool < rhs.m_queryPool;
+      return !operator==( rhs );
     }
 #endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_queryPool;
-    }
-
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_queryPool != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_queryPool == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkQueryPool m_queryPool;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
-
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueryPool>
-  {
-    using type = VULKAN_HPP_NAMESPACE::QueryPool;
-  };
-
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
-  };
-
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
-  };
 
+  public:
+    uint8_t major = {};
+    uint8_t minor = {};
+    uint8_t subminor = {};
+    uint8_t patch = {};
 
-  template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
-  {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
+  static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
+  using ConformanceVersionKHR = ConformanceVersion;
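A minimal usage sketch for the ConformanceVersion wrapper above (not part of the generated header). It assumes a valid vk::PhysicalDevice named physicalDevice and an implementation exposing Vulkan 1.2 or VK_KHR_driver_properties; the helper name meetsConformance is hypothetical. The conformance version is reported through PhysicalDeviceDriverProperties and compared field by field:

#include <tuple>
#include <vulkan/vulkan.hpp>

// Returns true when the device's reported conformance version is at least
// `required` (lexicographic comparison over major/minor/subminor/patch).
bool meetsConformance( vk::PhysicalDevice physicalDevice, vk::ConformanceVersion required )
{
  auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                             vk::PhysicalDeviceDriverProperties>();
  vk::ConformanceVersion cv = chain.get<vk::PhysicalDeviceDriverProperties>().conformanceVersion;
  return std::tie( cv.major, cv.minor, cv.subminor, cv.patch ) >=
         std::tie( required.major, required.minor, required.subminor, required.patch );
}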
 
-  struct RenderPassBeginInfo
+  struct CooperativeMatrixPropertiesNV
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {}) VULKAN_HPP_NOEXCEPT
-    : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( clearValueCount_ ), pClearValues( pClearValues_ )
+    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice) VULKAN_HPP_NOEXCEPT
+    : MSize( MSize_ ), NSize( NSize_ ), KSize( KSize_ ), AType( AType_ ), BType( BType_ ), CType( CType_ ), DType( DType_ ), scope( scope_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
-    {}
+    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
-    : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast<uint32_t>( clearValues_.size() ) ), pClearValues( clearValues_.data() )
+    CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
     {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
       return *this;
     }
 
-    RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
     {
-      renderPass = renderPass_;
+      MSize = MSize_;
       return *this;
     }
 
-    RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
     {
-      framebuffer = framebuffer_;
+      NSize = NSize_;
       return *this;
     }
 
-    RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+    CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
     {
-      renderArea = renderArea_;
+      KSize = KSize_;
       return *this;
     }
 
-    RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
+    CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
     {
-      clearValueCount = clearValueCount_;
+      AType = AType_;
       return *this;
     }
 
-    RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
+    CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
     {
-      pClearValues = pClearValues_;
+      BType = BType_;
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
+    CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
     {
-      clearValueCount = static_cast<uint32_t>( clearValues_.size() );
-      pClearValues = clearValues_.data();
+      CType = CType_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
+    CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      DType = DType_;
+      return *this;
+    }
 
-    operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
+      scope = scope_;
+      return *this;
     }
 
-    operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+
+    operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
+      return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( RenderPassBeginInfo const& ) const = default;
+    auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = default;
 #else
-    bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( renderPass == rhs.renderPass )
-          && ( framebuffer == rhs.framebuffer )
-          && ( renderArea == rhs.renderArea )
-          && ( clearValueCount == rhs.clearValueCount )
-          && ( pClearValues == rhs.pClearValues );
+          && ( MSize == rhs.MSize )
+          && ( NSize == rhs.NSize )
+          && ( KSize == rhs.KSize )
+          && ( AType == rhs.AType )
+          && ( BType == rhs.BType )
+          && ( CType == rhs.CType )
+          && ( DType == rhs.DType )
+          && ( scope == rhs.scope );
     }
 
-    bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -31999,84 +32268,87 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
-    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
-    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
-    uint32_t clearValueCount = {};
-    const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
+    void* pNext = {};
+    uint32_t MSize = {};
+    uint32_t NSize = {};
+    uint32_t KSize = {};
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
 
   };
-  static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
+  struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
   {
-    using Type = RenderPassBeginInfo;
+    using Type = CooperativeMatrixPropertiesNV;
   };
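A sketch of how CooperativeMatrixPropertiesNV is typically consumed (not part of the generated header). It assumes VK_NV_cooperative_matrix is available, the dispatcher resolves the extension entry point, and physicalDevice is a valid handle; the helper name is hypothetical:

#include <algorithm>
#include <vector>
#include <vulkan/vulkan.hpp>

// True if the device advertises a 16x16x16 fp16 multiply shape at subgroup scope.
bool hasFp16CoopMatrix16( vk::PhysicalDevice physicalDevice )
{
  std::vector<vk::CooperativeMatrixPropertiesNV> shapes =
    physicalDevice.getCooperativeMatrixPropertiesNV();
  return std::any_of( shapes.begin(), shapes.end(),
    []( vk::CooperativeMatrixPropertiesNV const & p ) {
      return p.MSize == 16 && p.NSize == 16 && p.KSize == 16 &&
             p.AType == vk::ComponentTypeNV::eFloat16 &&
             p.scope == vk::ScopeNV::eSubgroup;
    } );
}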
 
-  struct SubpassBeginInfo
+  struct CopyCommandTransformInfoQCOM
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline) VULKAN_HPP_NOEXCEPT
-    : contents( contents_ )
+    VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT
+    : transform( transform_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
+    CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const *>( &rhs );
       return *this;
     }
 
-    SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    CopyCommandTransformInfoQCOM & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
+    CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
     {
-      contents = contents_;
+      transform = transform_;
       return *this;
     }
 
 
-    operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkCopyCommandTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
+      return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM*>( this );
     }
 
-    operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSubpassBeginInfo*>( this );
+      return *reinterpret_cast<VkCopyCommandTransformInfoQCOM*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SubpassBeginInfo const& ) const = default;
+    auto operator<=>( CopyCommandTransformInfoQCOM const& ) const = default;
 #else
-    bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( CopyCommandTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( contents == rhs.contents );
+          && ( transform == rhs.transform );
     }
 
-    bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( CopyCommandTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -32085,93 +32357,122 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
 
   };
-  static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyCommandTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eSubpassBeginInfo>
+  struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
   {
-    using Type = SubpassBeginInfo;
+    using Type = CopyCommandTransformInfoQCOM;
   };
-  using SubpassBeginInfoKHR = SubpassBeginInfo;
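A sketch of chaining the CopyCommandTransformInfoQCOM struct added above (not part of the generated header). It assumes VK_KHR_copy_commands2 and VK_QCOM_rotated_copy_commands are enabled, and that width and height are existing uint32_t values; the struct hangs off the pNext chain of a copy region:

// Rotate the copy 90 degrees by chaining the QCOM transform into a
// VK_KHR_copy_commands2 buffer-to-image copy region.
vk::CopyCommandTransformInfoQCOM rotate{ vk::SurfaceTransformFlagBitsKHR::eRotate90 };
vk::BufferImageCopy2KHR region{};
region.setPNext( &rotate )
      .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
      .setImageExtent( { width, height, 1 } );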
 
-  struct ImageBlit
+  struct CopyDescriptorSet
   {
-
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
-    : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
+    VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSet( srcSet_ ), srcBinding( srcBinding_ ), srcArrayElement( srcArrayElement_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) )
+    CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+    CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
       return *this;
     }
 
-    ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    CopyDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      srcSubresource = srcSubresource_;
+      pNext = pNext_;
       return *this;
     }
 
-    ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+    CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
     {
-      srcOffsets = srcOffsets_;
+      srcSet = srcSet_;
       return *this;
     }
 
-    ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstSubresource = dstSubresource_;
+      srcBinding = srcBinding_;
       return *this;
     }
 
-    ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+    CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstOffsets = dstOffsets_;
+      srcArrayElement = srcArrayElement_;
       return *this;
     }
 
+    CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
 
-    operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT
+    CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkImageBlit*>( this );
+      dstBinding = dstBinding_;
+      return *this;
     }
 
-    operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
+    CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageBlit*>( this );
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+
+    operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
+    }
+
+    operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyDescriptorSet*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageBlit const& ) const = default;
+    auto operator<=>( CopyDescriptorSet const& ) const = default;
 #else
-    bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( srcSubresource == rhs.srcSubresource )
-          && ( srcOffsets == rhs.srcOffsets )
-          && ( dstSubresource == rhs.dstSubresource )
-          && ( dstOffsets == rhs.dstOffsets );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSet == rhs.srcSet )
+          && ( srcBinding == rhs.srcBinding )
+          && ( srcArrayElement == rhs.srcArrayElement )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount );
     }
 
-    bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -32180,94 +32481,132 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
-    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
+    uint32_t srcBinding = {};
+    uint32_t srcArrayElement = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
 
   };
-  static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
 
-  struct ImageSubresourceRange
+  template <>
+  struct CppType<StructureType, StructureType::eCopyDescriptorSet>
   {
+    using Type = CopyDescriptorSet;
+  };
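A sketch of the CopyDescriptorSet wrapper in use (not part of the generated header), assuming device, srcSet, and dstSet are existing valid handles:

// Copy one descriptor from binding 0 of srcSet into binding 0 of dstSet.
vk::CopyDescriptorSet copy{};
copy.setSrcSet( srcSet ).setSrcBinding( 0 ).setSrcArrayElement( 0 )
    .setDstSet( dstSet ).setDstBinding( 0 ).setDstArrayElement( 0 )
    .setDescriptorCount( 1 );
device.updateDescriptorSets( nullptr, copy );   // no writes, one copy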
 
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct D3D12FenceSubmitInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
-    : aspectMask( aspectMask_ ), baseMipLevel( baseMipLevel_ ), levelCount( levelCount_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
+    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
+    : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
+    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
+    : waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
     {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
       return *this;
     }
 
-    ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      aspectMask = aspectMask_;
+      pNext = pNext_;
       return *this;
     }
 
-    ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
+    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      baseMipLevel = baseMipLevel_;
+      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
       return *this;
     }
 
-    ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
+    D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
     {
-      levelCount = levelCount_;
+      pWaitSemaphoreValues = pWaitSemaphoreValues_;
       return *this;
     }
 
-    ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
     {
-      baseArrayLayer = baseArrayLayer_;
+      waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
+      pWaitSemaphoreValues = waitSemaphoreValues_.data();
       return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      layerCount = layerCount_;
+      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
       return *this;
     }
 
-
-    operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
+    D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkImageSubresourceRange*>( this );
+      pSignalSemaphoreValues = pSignalSemaphoreValues_;
+      return *this;
     }
 
-    operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageSubresourceRange*>( this );
+      signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
+      pSignalSemaphoreValues = signalSemaphoreValues_.data();
+      return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageSubresourceRange const& ) const = default;
-#else
-    bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return ( aspectMask == rhs.aspectMask )
-          && ( baseMipLevel == rhs.baseMipLevel )
-          && ( levelCount == rhs.levelCount )
-          && ( baseArrayLayer == rhs.baseArrayLayer )
-          && ( layerCount == rhs.layerCount );
+      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
     }
 
-    bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default;
+#else
+    bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
+          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+          && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
+          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+    }
+
+    bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -32276,95 +32615,98 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
-    uint32_t baseMipLevel = {};
-    uint32_t levelCount = {};
-    uint32_t baseArrayLayer = {};
-    uint32_t layerCount = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
+    const void* pNext = {};
+    uint32_t waitSemaphoreValuesCount = {};
+    const uint64_t* pWaitSemaphoreValues = {};
+    uint32_t signalSemaphoreValuesCount = {};
+    const uint64_t* pSignalSemaphoreValues = {};
 
   };
-  static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
 
-  struct ImageCopy
+  template <>
+  struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
   {
+    using Type = D3D12FenceSubmitInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
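A sketch of D3D12FenceSubmitInfoKHR on its pNext chain (not part of the generated header). It is Win32-only and assumes VK_KHR_external_semaphore_win32 with a semaphore imported from a D3D12 fence; queue, commandBuffer, and signalSemaphore are assumed valid handles:

// Attach a D3D12 fence value to a submission by chaining the struct into
// VkSubmitInfo::pNext; the value array is indexed like pSignalSemaphores.
uint64_t signalValue = 2;
vk::D3D12FenceSubmitInfoKHR fenceValues{};
fenceValues.setSignalSemaphoreValues( signalValue );
vk::SubmitInfo submitInfo{};
submitInfo.setPNext( &fenceValues )
          .setCommandBuffers( commandBuffer )
          .setSignalSemaphores( signalSemaphore );
queue.submit( submitInfo, vk::Fence{} );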
 
+  struct DebugMarkerObjectNameInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
-    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
+    : objectType( objectType_ ), object( object_ ), pObjectName( pObjectName_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) )
+    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      srcSubresource = srcSubresource_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
       return *this;
     }
 
-    ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    DebugMarkerObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      srcOffset = srcOffset_;
+      pNext = pNext_;
       return *this;
     }
 
-    ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstSubresource = dstSubresource_;
+      objectType = objectType_;
       return *this;
     }
 
-    ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstOffset = dstOffset_;
+      object = object_;
       return *this;
     }
 
-    ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    DebugMarkerObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
     {
-      extent = extent_;
+      pObjectName = pObjectName_;
       return *this;
     }
 
 
-    operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDebugMarkerObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkImageCopy*>( this );
+      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
     }
 
-    operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
+    operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageCopy*>( this );
+      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageCopy const& ) const = default;
+    auto operator<=>( DebugMarkerObjectNameInfoEXT const& ) const = default;
 #else
-    bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( srcSubresource == rhs.srcSubresource )
-          && ( srcOffset == rhs.srcOffset )
-          && ( dstSubresource == rhs.dstSubresource )
-          && ( dstOffset == rhs.dstOffset )
-          && ( extent == rhs.extent );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( pObjectName == rhs.pObjectName );
     }
 
-    bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -32373,69 +32715,127 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
-    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
-    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
-    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
-    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t object = {};
+    const char* pObjectName = {};
 
   };
-  static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
-  struct SubpassEndInfo
+  template <>
+  struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
+  {
+    using Type = DebugMarkerObjectNameInfoEXT;
+  };
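A sketch of naming an object through DebugMarkerObjectNameInfoEXT (not part of the generated header). It assumes the VK_EXT_debug_marker device extension is enabled, the dispatcher resolves its entry points, and device and buffer are valid handles; the object field takes the raw handle as a uint64_t:

// Give a buffer a human-readable name for tools such as RenderDoc.
vk::DebugMarkerObjectNameInfoEXT nameInfo{};
nameInfo.setObjectType( vk::DebugReportObjectTypeEXT::eBuffer )
        .setObject( uint64_t( static_cast<VkBuffer>( buffer ) ) )
        .setPObjectName( "vertex buffer" );
device.debugMarkerSetObjectNameEXT( nameInfo );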
+
+  struct DebugMarkerObjectTagInfoEXT
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT
-    
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT
+    : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
+    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
+    : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
     {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
       return *this;
     }
 
-    SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DebugMarkerObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
+    DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
 
-    operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT
+    DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSubpassEndInfo*>( this );
+      object = object_;
+      return *this;
     }
 
-    operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
+    DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSubpassEndInfo*>( this );
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tag_.size() * sizeof(T);
+      pTag = tag_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDebugMarkerObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SubpassEndInfo const& ) const = default;
+    auto operator<=>( DebugMarkerObjectTagInfoEXT const& ) const = default;
 #else
-    bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext );
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
     }
 
-    bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -32444,172 +32844,196 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
     const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t object = {};
+    uint64_t tagName = {};
+    size_t tagSize = {};
+    const void* pTag = {};
 
   };
-  static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eSubpassEndInfo>
+  struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
   {
-    using Type = SubpassEndInfo;
+    using Type = DebugMarkerObjectTagInfoEXT;
   };
-  using SubpassEndInfoKHR = SubpassEndInfo;
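A sketch of attaching a tag with DebugMarkerObjectTagInfoEXT (not part of the generated header). It assumes VK_EXT_debug_marker is enabled, <array> is included, and device and image are valid handles; the explicit tagSize/pTag setters are used here, while the templated setTag helper above fills both from an array proxy:

// Attach a small binary payload (tag) to an image; tools can read it back.
std::array<uint32_t, 2> payload = { 0xCAFEu, 42u };
vk::DebugMarkerObjectTagInfoEXT tagInfo{};
tagInfo.setObjectType( vk::DebugReportObjectTypeEXT::eImage )
       .setObject( uint64_t( static_cast<VkImage>( image ) ) )
       .setTagName( 1 )
       .setTagSize( payload.size() * sizeof( uint32_t ) )
       .setPTag( payload.data() );
device.debugMarkerSetObjectTagEXT( tagInfo );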
 
-  class IndirectCommandsLayoutNV
+  struct DebugReportCallbackCreateInfoEXT
   {
-  public:
-    using CType = VkIndirectCommandsLayoutNV;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;
 
-  public:
-    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT
-      : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pfnCallback( pfnCallback_ ), pUserData( pUserData_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
-    {}
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
-      : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
+    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
     {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      m_indirectCommandsLayoutNV = indirectCommandsLayoutNV;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
       return *this;
     }
-#endif
 
-    IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    DebugReportCallbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      m_indirectCommandsLayoutNV = VK_NULL_HANDLE;
+      pNext = pNext_;
       return *this;
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( IndirectCommandsLayoutNV const& ) const = default;
-#else
-    bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV;
+      flags = flags_;
+      return *this;
     }
 
-    bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV;
+      pfnCallback = pfnCallback_;
+      return *this;
     }
 
-    bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    DebugReportCallbackCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV;
+      pUserData = pUserData_;
+      return *this;
     }
-#endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
+
+    operator VkDebugReportCallbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return m_indirectCommandsLayoutNV;
+      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
     }
 
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return m_indirectCommandsLayoutNV != VK_NULL_HANDLE;
+      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
     }
 
-    bool operator!() const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugReportCallbackCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_indirectCommandsLayoutNV == VK_NULL_HANDLE;
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnCallback == rhs.pfnCallback )
+          && ( pUserData == rhs.pUserData );
     }
 
-  private:
-    VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" );
+    bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eIndirectCommandsLayoutNV>
-  {
-    using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
-  };
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
-  };
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
+    PFN_vkDebugReportCallbackEXT pfnCallback = {};
+    void* pUserData = {};
 
+  };
+  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+  struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
   {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+    using Type = DebugReportCallbackCreateInfoEXT;
   };
 
-  struct IndirectCommandsStreamNV
+  struct DebugUtilsObjectNameInfoEXT
   {
-
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
-    : buffer( buffer_ ), offset( offset_ )
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
+    : objectType( objectType_ ), objectHandle( objectHandle_ ), pObjectName( pObjectName_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
-      : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
+    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
       return *this;
     }
 
-    IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      buffer = buffer_;
+      pNext = pNext_;
       return *this;
     }
 
-    IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
     {
-      offset = offset_;
+      objectType = objectType_;
       return *this;
     }
 
+    DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
 
-    operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
+      pObjectName = pObjectName_;
+      return *this;
     }
 
-    operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
+
+    operator VkDebugUtilsObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
+      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( IndirectCommandsStreamNV const& ) const = default;
+    auto operator<=>( DebugUtilsObjectNameInfoEXT const& ) const = default;
 #else
-    bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( buffer == rhs.buffer )
-          && ( offset == rhs.offset );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( pObjectName == rhs.pObjectName );
     }
 
-    bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -32618,172 +33042,178 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    const char* pObjectName = {};
 
   };
-  static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
-  struct GeneratedCommandsInfoNV
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
+  {
+    using Type = DebugUtilsObjectNameInfoEXT;
+  };
+
+  struct DebugUtilsMessengerCallbackDataEXT
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}) VULKAN_HPP_NOEXCEPT
-    : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( streamCount_ ), pStreams( pStreams_ ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char* pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char* pMessage_ = {}, uint32_t queueLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( queueLabelCount_ ), pQueueLabels( pQueueLabels_ ), cmdBufLabelCount( cmdBufLabelCount_ ), pCmdBufLabels( pCmdBufLabels_ ), objectCount( objectCount_ ), pObjects( pObjects_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-      : GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
+    DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
     {}
 
 #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} )
-    : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast<uint32_t>( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
+    DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char* pMessageIdName_, int32_t messageIdNumber_, const char* pMessage_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {} )
+    : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) ), pQueueLabels( queueLabels_.data() ), cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) ), pCmdBufLabels( cmdBufLabels_.data() ), objectCount( static_cast<uint32_t>( objects_.size() ) ), pObjects( objects_.data() )
     {}
 #endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
       return *this;
     }
 
-    GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      pipelineBindPoint = pipelineBindPoint_;
+      flags = flags_;
       return *this;
     }
 
-    GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char* pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
     {
-      pipeline = pipeline_;
+      pMessageIdName = pMessageIdName_;
       return *this;
     }
 
-    GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
     {
-      indirectCommandsLayout = indirectCommandsLayout_;
+      messageIdNumber = messageIdNumber_;
       return *this;
     }
 
-    GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setPMessage( const char* pMessage_ ) VULKAN_HPP_NOEXCEPT
     {
-      streamCount = streamCount_;
+      pMessage = pMessage_;
       return *this;
     }
 
-    GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      pStreams = pStreams_;
+      queueLabelCount = queueLabelCount_;
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
     {
-      streamCount = static_cast<uint32_t>( streams_.size() );
-      pStreams = streams_.data();
+      pQueueLabels = pQueueLabels_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DebugUtilsMessengerCallbackDataEXT & setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
     {
-      sequencesCount = sequencesCount_;
+      queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
+      pQueueLabels = queueLabels_.data();
       return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      preprocessBuffer = preprocessBuffer_;
+      cmdBufLabelCount = cmdBufLabelCount_;
       return *this;
     }
 
-    GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
     {
-      preprocessOffset = preprocessOffset_;
+      pCmdBufLabels = pCmdBufLabels_;
       return *this;
     }
 
-    GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
     {
-      preprocessSize = preprocessSize_;
+      cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
+      pCmdBufLabels = cmdBufLabels_.data();
       return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      sequencesCountBuffer = sequencesCountBuffer_;
+      objectCount = objectCount_;
       return *this;
     }
 
-    GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ ) VULKAN_HPP_NOEXCEPT
     {
-      sequencesCountOffset = sequencesCountOffset_;
+      pObjects = pObjects_;
       return *this;
     }
 
-    GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sequencesIndexBuffer = sequencesIndexBuffer_;
-      return *this;
-    }
-
-    GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DebugUtilsMessengerCallbackDataEXT & setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
     {
-      sequencesIndexOffset = sequencesIndexOffset_;
+      objectCount = static_cast<uint32_t>( objects_.size() );
+      pObjects = objects_.data();
       return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDebugUtilsMessengerCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
+      return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
     }
 
-    operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
+    operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
+      return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( GeneratedCommandsInfoNV const& ) const = default;
+    auto operator<=>( DebugUtilsMessengerCallbackDataEXT const& ) const = default;
 #else
-    bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( pipelineBindPoint == rhs.pipelineBindPoint )
-          && ( pipeline == rhs.pipeline )
-          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
-          && ( streamCount == rhs.streamCount )
-          && ( pStreams == rhs.pStreams )
-          && ( sequencesCount == rhs.sequencesCount )
-          && ( preprocessBuffer == rhs.preprocessBuffer )
-          && ( preprocessOffset == rhs.preprocessOffset )
-          && ( preprocessSize == rhs.preprocessSize )
-          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
-          && ( sequencesCountOffset == rhs.sequencesCountOffset )
-          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
-          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
+          && ( flags == rhs.flags )
+          && ( pMessageIdName == rhs.pMessageIdName )
+          && ( messageIdNumber == rhs.messageIdNumber )
+          && ( pMessage == rhs.pMessage )
+          && ( queueLabelCount == rhs.queueLabelCount )
+          && ( pQueueLabels == rhs.pQueueLabels )
+          && ( cmdBufLabelCount == rhs.cmdBufLabelCount )
+          && ( pCmdBufLabels == rhs.pCmdBufLabels )
+          && ( objectCount == rhs.objectCount )
+          && ( pObjects == rhs.pObjects );
     }
 
-    bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -32792,99 +33222,117 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
-    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
-    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
-    uint32_t streamCount = {};
-    const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {};
-    uint32_t sequencesCount = {};
-    VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
-    VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
-    VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
+    const char* pMessageIdName = {};
+    int32_t messageIdNumber = {};
+    const char* pMessage = {};
+    uint32_t queueLabelCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels = {};
+    uint32_t cmdBufLabelCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels = {};
+    uint32_t objectCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects = {};
 
   };
-  static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
+  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
   {
-    using Type = GeneratedCommandsInfoNV;
+    using Type = DebugUtilsMessengerCallbackDataEXT;
   };
 
-  struct MemoryBarrier
+  struct DebugUtilsMessengerCreateInfoEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
-    : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ )
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), messageSeverity( messageSeverity_ ), messageType( messageType_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
-      : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
+    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
       return *this;
     }
 
-    MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      srcAccessMask = srcAccessMask_;
+      flags = flags_;
       return *this;
     }
 
-    MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstAccessMask = dstAccessMask_;
+      messageSeverity = messageSeverity_;
       return *this;
     }
 
+    DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageType = messageType_;
+      return *this;
+    }
 
-    operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkMemoryBarrier*>( this );
+      pfnUserCallback = pfnUserCallback_;
+      return *this;
     }
 
-    operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    DebugUtilsMessengerCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkMemoryBarrier*>( this );
+      pUserData = pUserData_;
+      return *this;
+    }
+
+
+    operator VkDebugUtilsMessengerCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryBarrier const& ) const = default;
+    auto operator<=>( DebugUtilsMessengerCreateInfoEXT const& ) const = default;
 #else
-    bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( srcAccessMask == rhs.srcAccessMask )
-          && ( dstAccessMask == rhs.dstAccessMask );
+          && ( flags == rhs.flags )
+          && ( messageSeverity == rhs.messageSeverity )
+          && ( messageType == rhs.messageType )
+          && ( pfnUserCallback == rhs.pfnUserCallback )
+          && ( pUserData == rhs.pUserData );
     }
 
-    bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -32893,130 +33341,129 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
-    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
+    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
+    void* pUserData = {};
 
   };
-  static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eMemoryBarrier>
+  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
   {
-    using Type = MemoryBarrier;
+    using Type = DebugUtilsMessengerCreateInfoEXT;
   };
 
-  struct ImageMemoryBarrier
+  struct DebugUtilsObjectTagInfoEXT
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
-    : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT
+    : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
+    DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
     {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
+    : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
       return *this;
     }
 
-    ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      srcAccessMask = srcAccessMask_;
-      return *this;
-    }
-
-    ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstAccessMask = dstAccessMask_;
-      return *this;
-    }
-
-    ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
     {
-      oldLayout = oldLayout_;
+      objectType = objectType_;
       return *this;
     }
 
-    ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
     {
-      newLayout = newLayout_;
+      objectHandle = objectHandle_;
       return *this;
     }
 
-    ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
     {
-      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      tagName = tagName_;
       return *this;
     }
 
-    ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      tagSize = tagSize_;
       return *this;
     }
 
-    ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    DebugUtilsObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
     {
-      image = image_;
+      pTag = pTag_;
       return *this;
     }
 
-    ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
     {
-      subresourceRange = subresourceRange_;
+      tagSize = tag_.size() * sizeof(T);
+      pTag = tag_.data();
       return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDebugUtilsObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
+      return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
     }
 
-    operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageMemoryBarrier*>( this );
+      return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageMemoryBarrier const& ) const = default;
+    auto operator<=>( DebugUtilsObjectTagInfoEXT const& ) const = default;
 #else
-    bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( srcAccessMask == rhs.srcAccessMask )
-          && ( dstAccessMask == rhs.dstAccessMask )
-          && ( oldLayout == rhs.oldLayout )
-          && ( newLayout == rhs.newLayout )
-          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
-          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
-          && ( image == rhs.image )
-          && ( subresourceRange == rhs.subresourceRange );
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
     }
 
-    bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -33025,278 +33472,255 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
-    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
-    VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
-    VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
-    uint32_t srcQueueFamilyIndex = {};
-    uint32_t dstQueueFamilyIndex = {};
-    VULKAN_HPP_NAMESPACE::Image image = {};
-    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    uint64_t tagName = {};
+    size_t tagSize = {};
+    const void* pTag = {};
 
   };
-  static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eImageMemoryBarrier>
+  struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
   {
-    using Type = ImageMemoryBarrier;
+    using Type = DebugUtilsObjectTagInfoEXT;
   };
 
-  class BufferView
+  struct DedicatedAllocationBufferCreateInfoNV
   {
-  public:
-    using CType = VkBufferView;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
 
-  public:
-    VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT
-      : m_bufferView(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
+    : dedicatedAllocation( dedicatedAllocation_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_bufferView(VK_NULL_HANDLE)
-    {}
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
-      : m_bufferView( bufferView )
+    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
     {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      m_bufferView = bufferView;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
       return *this;
     }
-#endif
 
-    BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    DedicatedAllocationBufferCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      m_bufferView = VK_NULL_HANDLE;
+      pNext = pNext_;
       return *this;
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( BufferView const& ) const = default;
-#else
-    bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_bufferView == rhs.m_bufferView;
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
     }
 
-    bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_bufferView != rhs.m_bufferView;
-    }
 
-    bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    operator VkDedicatedAllocationBufferCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      return m_bufferView < rhs.m_bufferView;
+      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
     }
-#endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
+    operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return m_bufferView;
+      return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
     }
 
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DedicatedAllocationBufferCreateInfoNV const& ) const = default;
+#else
+    bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_bufferView != VK_NULL_HANDLE;
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocation == rhs.dedicatedAllocation );
     }
 
-    bool operator!() const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_bufferView == VK_NULL_HANDLE;
+      return !operator==( rhs );
     }
+#endif
 
-  private:
-    VkBufferView m_bufferView;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
-
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eBufferView>
-  {
-    using type = VULKAN_HPP_NAMESPACE::BufferView;
-  };
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::BufferView;
-  };
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::BufferView;
   };
-
+  static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferView>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
   {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+    using Type = DedicatedAllocationBufferCreateInfoNV;
   };
 
-  struct WriteDescriptorSet
+  struct DedicatedAllocationImageCreateInfoNV
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {}) VULKAN_HPP_NOEXCEPT
-    : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), pImageInfo( pImageInfo_ ), pBufferInfo( pBufferInfo_ ), pTexelBufferView( pTexelBufferView_ )
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
+    : dedicatedAllocation( dedicatedAllocation_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
-      : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) )
+    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
     {}
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {} )
-    : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() )
-    {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-      VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) == 1 );
-#else
-      if ( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1 )
-      {
-        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::WriteDescriptorSet::WriteDescriptorSet: ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1" );
-      }
-#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
-    }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
       return *this;
     }
 
-    WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DedicatedAllocationImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstSet = dstSet_;
+      dedicatedAllocation = dedicatedAllocation_;
       return *this;
     }
 
-    WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+
+    operator VkDedicatedAllocationImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      dstBinding = dstBinding_;
-      return *this;
+      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>( this );
     }
 
-    WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      dstArrayElement = dstArrayElement_;
-      return *this;
+      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>( this );
     }
 
-    WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DedicatedAllocationImageCreateInfoNV const& ) const = default;
+#else
+    bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      descriptorCount = descriptorCount_;
-      return *this;
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocation == rhs.dedicatedAllocation );
     }
 
-    WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      descriptorType = descriptorType_;
-      return *this;
+      return !operator==( rhs );
     }
+#endif
 
-    WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
+
+  };
+  static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
+  {
+    using Type = DedicatedAllocationImageCreateInfoNV;
+  };
+
+  struct DedicatedAllocationMemoryAllocateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
+    : image( image_ ), buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      pImageInfo = pImageInfo_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
+    DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
-      pImageInfo = imageInfo_.data();
+      pNext = pNext_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
+    DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
     {
-      pBufferInfo = pBufferInfo_;
+      image = image_;
       return *this;
     }
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
-      pBufferInfo = bufferInfo_.data();
-      return *this;
-    }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
-    WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pTexelBufferView = pTexelBufferView_;
-      return *this;
-    }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
+    DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
     {
-      descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
-      pTexelBufferView = texelBufferView_.data();
+      buffer = buffer_;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
+      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
     }
 
-    operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkWriteDescriptorSet*>( this );
+      return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( WriteDescriptorSet const& ) const = default;
+    auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const& ) const = default;
 #else
-    bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( dstSet == rhs.dstSet )
-          && ( dstBinding == rhs.dstBinding )
-          && ( dstArrayElement == rhs.dstArrayElement )
-          && ( descriptorCount == rhs.descriptorCount )
-          && ( descriptorType == rhs.descriptorType )
-          && ( pImageInfo == rhs.pImageInfo )
-          && ( pBufferInfo == rhs.pBufferInfo )
-          && ( pTexelBufferView == rhs.pTexelBufferView );
+          && ( image == rhs.image )
+          && ( buffer == rhs.buffer );
     }
 
-    bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -33305,307 +33729,190 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
-    uint32_t dstBinding = {};
-    uint32_t dstArrayElement = {};
-    uint32_t descriptorCount = {};
-    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
-    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
-    const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
-    const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
 
   };
-  static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eWriteDescriptorSet>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
   {
-    using Type = WriteDescriptorSet;
+    using Type = DedicatedAllocationMemoryAllocateInfoNV;
   };
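
The two NV structs above are used together: DedicatedAllocationImageCreateInfoNV is chained into the image create info to request a dedicated allocation, and DedicatedAllocationMemoryAllocateInfoNV is chained into the memory allocation to name the image (or buffer) the memory is dedicated to. A minimal sketch, assuming the default `vk` namespace, the default exception-based enhanced-mode bindings, and an existing vk::Device `device`, vk::ImageCreateInfo `imageInfo` and `memoryTypeIndex`:

  // Request a dedicated allocation for the image (VK_NV_dedicated_allocation).
  vk::DedicatedAllocationImageCreateInfoNV dedicatedImage( VK_TRUE );
  imageInfo.setPNext( &dedicatedImage );
  vk::Image image = device.createImage( imageInfo );

  // Allocate memory dedicated to exactly that image; the buffer member stays null.
  vk::MemoryRequirements reqs = device.getImageMemoryRequirements( image );
  vk::DedicatedAllocationMemoryAllocateInfoNV dedicatedAlloc( image );
  vk::MemoryAllocateInfo allocInfo( reqs.size, memoryTypeIndex );
  allocInfo.setPNext( &dedicatedAlloc );
  vk::DeviceMemory memory = device.allocateMemory( allocInfo );
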
 
-  class DescriptorUpdateTemplate
+  struct DescriptorPoolSize
   {
-  public:
-    using CType = VkDescriptorUpdateTemplate;
 
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
 
-  public:
-    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT
-      : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorPoolSize(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), descriptorCount( descriptorCount_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
-    {}
+    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
-      : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
+    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) )
     {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT
-    {
-      m_descriptorUpdateTemplate = descriptorUpdateTemplate;
-      return *this;
-    }
-#endif
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      m_descriptorUpdateTemplate = VK_NULL_HANDLE;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
       return *this;
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorUpdateTemplate const& ) const = default;
-#else
-    bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
-    }
-
-    bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
-    }
-
-    bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
+      type = type_;
+      return *this;
     }
-#endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
+    DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorUpdateTemplate;
+      descriptorCount = descriptorCount_;
+      return *this;
     }
 
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
-    }
 
-    bool operator!() const VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorPoolSize const&() const VULKAN_HPP_NOEXCEPT
     {
-      return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
+      return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
     }
 
-  private:
-    VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
-
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorUpdateTemplate>
-  {
-    using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
-  };
-
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
-  };
-
-
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
-  };
-
-
-  template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
-  {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
-  };
-  using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
-
-  class Event
-  {
-  public:
-    using CType = VkEvent;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
-
-  public:
-    VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT
-      : m_event(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_event(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT
-      : m_event( event )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
     {
-      m_event = event;
-      return *this;
+      return *reinterpret_cast<VkDescriptorPoolSize*>( this );
     }
-#endif
 
-    Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-    {
-      m_event = VK_NULL_HANDLE;
-      return *this;
-    }
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Event const& ) const = default;
+    auto operator<=>( DescriptorPoolSize const& ) const = default;
 #else
-    bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_event == rhs.m_event;
-    }
-
-    bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_event != rhs.m_event;
+      return ( type == rhs.type )
+          && ( descriptorCount == rhs.descriptorCount );
     }
 
-    bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_event < rhs.m_event;
+      return !operator==( rhs );
     }
 #endif
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_event;
-    }
-
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_event != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_event == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkEvent m_event;
-  };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
-
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eEvent>
-  {
-    using type = VULKAN_HPP_NAMESPACE::Event;
-  };
-
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::Event;
-  };
-
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::Event;
-  };
 
+  public:
+    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t descriptorCount = {};
 
-  template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Event>
-  {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
+  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
 
-  struct ImageResolve
+  struct DescriptorPoolCreateInfo
   {
-
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
-    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( poolSizeCount_ ), pPoolSizes( pPoolSizes_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) )
+    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
     {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, uint32_t maxSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ )
+    : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
       return *this;
     }
 
-    ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      srcSubresource = srcSubresource_;
+      pNext = pNext_;
       return *this;
     }
 
-    ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      srcOffset = srcOffset_;
+      flags = flags_;
       return *this;
     }
 
-    ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstSubresource = dstSubresource_;
+      maxSets = maxSets_;
       return *this;
     }
 
-    ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstOffset = dstOffset_;
+      poolSizeCount = poolSizeCount_;
       return *this;
     }
 
-    ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
     {
-      extent = extent_;
+      pPoolSizes = pPoolSizes_;
       return *this;
     }
 
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorPoolCreateInfo & setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
+      pPoolSizes = poolSizes_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT
+
+    operator VkDescriptorPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkImageResolve*>( this );
+      return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
     }
 
-    operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageResolve*>( this );
+      return *reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageResolve const& ) const = default;
+    auto operator<=>( DescriptorPoolCreateInfo const& ) const = default;
 #else
-    bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( srcSubresource == rhs.srcSubresource )
-          && ( srcOffset == rhs.srcOffset )
-          && ( dstSubresource == rhs.dstSubresource )
-          && ( dstOffset == rhs.dstOffset )
-          && ( extent == rhs.extent );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( maxSets == rhs.maxSets )
+          && ( poolSizeCount == rhs.poolSizeCount )
+          && ( pPoolSizes == rhs.pPoolSizes );
     }
 
-    bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -33614,104 +33921,83 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
-    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
-    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
-    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
-    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
+    uint32_t maxSets = {};
+    uint32_t poolSizeCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes = {};
 
   };
-  static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
-  struct ImageResolve2KHR
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
+  {
+    using Type = DescriptorPoolCreateInfo;
+  };
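
The new enhanced-mode constructor (guarded by VULKAN_HPP_DISABLE_ENHANCED_MODE) takes an ArrayProxy of DescriptorPoolSize elements and derives poolSizeCount/pPoolSizes from it. A minimal sketch, assuming an existing vk::Device `device` and <array> included:

  // Two pool sizes; the ArrayProxy constructor fills poolSizeCount and pPoolSizes.
  std::array<vk::DescriptorPoolSize, 2> poolSizes = {
    vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 16 ),
    vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 32 )
  };
  vk::DescriptorPoolCreateInfo poolInfo(
    vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,   // flags
    8,                                                       // maxSets
    poolSizes );
  vk::DescriptorPool pool = device.createDescriptorPool( poolInfo );
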
+
+  struct DescriptorPoolInlineUniformBlockCreateInfoEXT
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ImageResolve2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
-    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT(uint32_t maxInlineUniformBlockBindings_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ImageResolve2KHR( *reinterpret_cast<ImageResolve2KHR const *>( &rhs ) )
+    DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorPoolInlineUniformBlockCreateInfoEXT( *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfoEXT const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2KHR const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const *>( &rhs );
       return *this;
     }
 
-    ImageResolve2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    ImageResolve2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
-    {
-      srcSubresource = srcSubresource_;
-      return *this;
-    }
-
-    ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
-    {
-      srcOffset = srcOffset_;
-      return *this;
-    }
-
-    ImageResolve2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstSubresource = dstSubresource_;
-      return *this;
-    }
-
-    ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstOffset = dstOffset_;
-      return *this;
-    }
-
-    ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
     {
-      extent = extent_;
+      maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
       return *this;
     }
 
 
-    operator VkImageResolve2KHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkImageResolve2KHR*>( this );
+      return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
     }
 
-    operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageResolve2KHR*>( this );
+      return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageResolve2KHR const& ) const = default;
+    auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const& ) const = default;
 #else
-    bool operator==( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( srcSubresource == rhs.srcSubresource )
-          && ( srcOffset == rhs.srcOffset )
-          && ( dstSubresource == rhs.dstSubresource )
-          && ( dstOffset == rhs.dstOffset )
-          && ( extent == rhs.extent );
+          && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
     }
 
-    bool operator!=( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -33720,221 +34006,309 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2KHR;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
-    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
-    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
-    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
-    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+    uint32_t maxInlineUniformBlockBindings = {};
 
   };
-  static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageResolve2KHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolInlineUniformBlockCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eImageResolve2KHR>
+  struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT>
   {
-    using Type = ImageResolve2KHR;
+    using Type = DescriptorPoolInlineUniformBlockCreateInfoEXT;
   };
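
When a pool will serve inline uniform block descriptors (VK_EXT_inline_uniform_block), this struct is chained into DescriptorPoolCreateInfo::pNext to declare how many inline uniform block bindings the pool may allocate. A short sketch, reusing the `poolInfo` from the previous example:

  // Allow up to 4 inline uniform block bindings to be allocated from the pool.
  vk::DescriptorPoolInlineUniformBlockCreateInfoEXT inlineBlocks( 4 );
  poolInfo.setPNext( &inlineBlocks );
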
 
-  struct ResolveImageInfo2KHR
+  class DescriptorPool
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
-    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
-    {}
+  public:
+    using CType = VkDescriptorPool;
 
-    VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
 
-    ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ResolveImageInfo2KHR( *reinterpret_cast<ResolveImageInfo2KHR const *>( &rhs ) )
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool(VK_NULL_HANDLE)
     {}
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    ResolveImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ )
-    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool(VK_NULL_HANDLE)
     {}
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2KHR & operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool( descriptorPool )
+    {}
 
-    ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR const *>( &rhs );
+      m_descriptorPool = descriptorPool;
       return *this;
     }
+#endif
 
-    ResolveImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      m_descriptorPool = VK_NULL_HANDLE;
       return *this;
     }
 
-    ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorPool const& ) const = default;
+#else
+    bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      srcImage = srcImage_;
-      return *this;
+      return m_descriptorPool == rhs.m_descriptorPool;
     }
 
-    ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      srcImageLayout = srcImageLayout_;
-      return *this;
+      return m_descriptorPool != rhs.m_descriptorPool;
     }
 
-    ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      dstImage = dstImage_;
-      return *this;
+      return m_descriptorPool < rhs.m_descriptorPool;
     }
+#endif
 
-    ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
     {
-      dstImageLayout = dstImageLayout_;
-      return *this;
+      return m_descriptorPool;
     }
 
-    ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      regionCount = regionCount_;
-      return *this;
+      return m_descriptorPool != VK_NULL_HANDLE;
     }
 
-    ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      pRegions = pRegions_;
+      return m_descriptorPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorPool m_descriptorPool;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorPool>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorPool>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
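
DescriptorPool follows the usual handle-wrapper pattern: a value-semantic wrapper around VkDescriptorPool with a null-handle default, comparison operators, and an explicit conversion back to the C handle. A standalone sketch of those semantics, assuming the `device`/`poolInfo` from the earlier sketch and <cassert> for the assert:

  vk::DescriptorPool pool;                                       // default == VK_NULL_HANDLE
  assert( !pool );                                               // operator! tests for the null handle
  pool = device.createDescriptorPool( poolInfo );
  VkDescriptorPool raw = static_cast<VkDescriptorPool>( pool );  // back to the C handle
  pool = nullptr;                                                // reset to VK_NULL_HANDLE
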
+
+  class DescriptorSetLayout
+  {
+  public:
+    using CType = VkDescriptorSetLayout;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout( descriptorSetLayout )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSetLayout = descriptorSetLayout;
       return *this;
     }
+#endif
 
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    ResolveImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      regionCount = static_cast<uint32_t>( regions_.size() );
-      pRegions = regions_.data();
+      m_descriptorSetLayout = VK_NULL_HANDLE;
       return *this;
     }
-#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayout const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
+    }
 
-    operator VkResolveImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkResolveImageInfo2KHR*>( this );
+      return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
     }
 
-    operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkResolveImageInfo2KHR*>( this );
+      return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
     }
+#endif
 
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout;
+    }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ResolveImageInfo2KHR const& ) const = default;
-#else
-    bool operator==( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( srcImage == rhs.srcImage )
-          && ( srcImageLayout == rhs.srcImageLayout )
-          && ( dstImage == rhs.dstImage )
-          && ( dstImageLayout == rhs.dstImageLayout )
-          && ( regionCount == rhs.regionCount )
-          && ( pRegions == rhs.pRegions );
+      return m_descriptorSetLayout != VK_NULL_HANDLE;
     }
 
-    bool operator!=( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return m_descriptorSetLayout == VK_NULL_HANDLE;
     }
-#endif
 
+  private:
+    VkDescriptorSetLayout m_descriptorSetLayout;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
 
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSetLayout>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+  };
 
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2KHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Image srcImage = {};
-    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
-    VULKAN_HPP_NAMESPACE::Image dstImage = {};
-    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
-    uint32_t regionCount = {};
-    const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions = {};
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+  };
 
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
   };
-  static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ResolveImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
 
   template <>
-  struct CppType<StructureType, StructureType::eResolveImageInfo2KHR>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
   {
-    using Type = ResolveImageInfo2KHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
 
-  struct PerformanceMarkerInfoINTEL
+  struct DescriptorSetAllocateInfo
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}) VULKAN_HPP_NOEXCEPT
-    : marker( marker_ )
+    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}) VULKAN_HPP_NOEXCEPT
+    : descriptorPool( descriptorPool_ ), descriptorSetCount( descriptorSetCount_ ), pSetLayouts( pSetLayouts_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
+    DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetAllocateInfo( *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs ) )
     {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ )
+    : descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>( &rhs );
       return *this;
     }
 
-    PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
     {
-      marker = marker_;
+      descriptorPool = descriptorPool_;
       return *this;
     }
 
+    DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
 
-    operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
+      pSetLayouts = pSetLayouts_;
+      return *this;
     }
 
-    operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetAllocateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
+      descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
+      pSetLayouts = setLayouts_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDescriptorSetAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
+    }
+
+    operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PerformanceMarkerInfoINTEL const& ) const = default;
+    auto operator<=>( DescriptorSetAllocateInfo const& ) const = default;
 #else
-    bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( marker == rhs.marker );
+          && ( descriptorPool == rhs.descriptorPool )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pSetLayouts == rhs.pSetLayouts );
     }
 
-    bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -33943,94 +34317,116 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
     const void* pNext = {};
-    uint64_t marker = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
+    uint32_t descriptorSetCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
 
   };
-  static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
+  struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
   {
-    using Type = PerformanceMarkerInfoINTEL;
+    using Type = DescriptorSetAllocateInfo;
   };
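
The new enhanced-mode constructor fills descriptorSetCount/pSetLayouts from an ArrayProxy of set layouts, so allocating one set per layout becomes a two-liner. A minimal sketch, assuming `device`, the `pool` from above, a vk::DescriptorSetLayout `layout`, and <array> included:

  // One descriptor set per entry in `layouts`; count and pointer are deduced.
  std::array<vk::DescriptorSetLayout, 2> layouts = { layout, layout };
  vk::DescriptorSetAllocateInfo allocInfo( pool, layouts );
  auto sets = device.allocateDescriptorSets( allocInfo );  // std::vector<vk::DescriptorSet>
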
 
-  struct PerformanceOverrideInfoINTEL
+  struct DescriptorSetLayoutBinding
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
+
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}) VULKAN_HPP_NOEXCEPT
-    : type( type_ ), enable( enable_ ), parameter( parameter_ )
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {}) VULKAN_HPP_NOEXCEPT
+    : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( descriptorCount_ ), stageFlags( stageFlags_ ), pImmutableSamplers( pImmutableSamplers_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
+    DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetLayoutBinding( *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs ) )
+    {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutBinding( uint32_t binding_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
+    : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) ), stageFlags( stageFlags_ ), pImmutableSamplers( immutableSamplers_.data() )
     {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>( &rhs );
       return *this;
     }
 
-    PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      binding = binding_;
       return *this;
     }
 
-    PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
     {
-      type = type_;
+      descriptorType = descriptorType_;
       return *this;
     }
 
-    PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      enable = enable_;
+      descriptorCount = descriptorCount_;
       return *this;
     }
 
-    PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
     {
-      parameter = parameter_;
+      stageFlags = stageFlags_;
       return *this;
     }
 
+    DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImmutableSamplers = pImmutableSamplers_;
+      return *this;
+    }
 
-    operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutBinding & setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
+      descriptorCount = static_cast<uint32_t>( immutableSamplers_.size() );
+      pImmutableSamplers = immutableSamplers_.data();
+      return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
+
+    operator VkDescriptorSetLayoutBinding const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
+      return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
+    }
+
+    operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default;
+    auto operator<=>( DescriptorSetLayoutBinding const& ) const = default;
 #else
-    bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( type == rhs.type )
-          && ( enable == rhs.enable )
-          && ( parameter == rhs.parameter );
+      return ( binding == rhs.binding )
+          && ( descriptorType == rhs.descriptorType )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( stageFlags == rhs.stageFlags )
+          && ( pImmutableSamplers == rhs.pImmutableSamplers );
     }
 
-    bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -34039,82 +34435,98 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
-    VULKAN_HPP_NAMESPACE::Bool32 enable = {};
-    uint64_t parameter = {};
-
-  };
-  static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+    uint32_t binding = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers = {};
 
-  template <>
-  struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
-  {
-    using Type = PerformanceOverrideInfoINTEL;
   };
+  static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
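
Because every setter returns *this, a binding can be described fluently; descriptorCount is set explicitly here since no immutable-sampler array is supplied (the ArrayProxy overloads above derive it automatically). A minimal sketch:

  // Binding 0: one uniform buffer visible to the vertex and fragment stages.
  vk::DescriptorSetLayoutBinding uboBinding = vk::DescriptorSetLayoutBinding()
      .setBinding( 0 )
      .setDescriptorType( vk::DescriptorType::eUniformBuffer )
      .setDescriptorCount( 1 )
      .setStageFlags( vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment );
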
 
-  struct PerformanceStreamMarkerInfoINTEL
+  struct DescriptorSetLayoutBindingFlagsCreateInfo
   {
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}) VULKAN_HPP_NOEXCEPT
-    : marker( marker_ )
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {}) VULKAN_HPP_NOEXCEPT
+    : bindingCount( bindingCount_ ), pBindingFlags( pBindingFlags_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
+    DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutBindingFlagsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ )
+    : bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
     {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
       return *this;
     }
 
-    PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      marker = marker_;
+      bindingCount = bindingCount_;
       return *this;
     }
 
+    DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBindingFlags = pBindingFlags_;
+      return *this;
+    }
 
-    operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
+      bindingCount = static_cast<uint32_t>( bindingFlags_.size() );
+      pBindingFlags = bindingFlags_.data();
+      return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+
+    operator VkDescriptorSetLayoutBindingFlagsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
+      return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
+    }
+
+    operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default;
+    auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const& ) const = default;
 #else
-    bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( marker == rhs.marker );
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindingFlags == rhs.pBindingFlags );
     }
 
-    bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -34123,106 +34535,111 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
     const void* pNext = {};
-    uint32_t marker = {};
+    uint32_t bindingCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags = {};
 
   };
-  static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutBindingFlagsCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
   {
-    using Type = PerformanceStreamMarkerInfoINTEL;
+    using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
   };
+  using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
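Usage sketch (not part of the generated header): the binding-flags structure is chained into a DescriptorSetLayoutCreateInfo through pNext, with one flag entry per binding of the layout. Names below are illustrative.

    // One flag entry, matching a layout with a single binding.
    std::vector<vk::DescriptorBindingFlags> bindingFlags{
      vk::DescriptorBindingFlagBits::ePartiallyBound };

    // Enhanced-mode constructor fills bindingCount / pBindingFlags from the proxy.
    vk::DescriptorSetLayoutBindingFlagsCreateInfo flagsInfo( bindingFlags );
    // Chained via layoutInfo.setPNext( &flagsInfo ) -- see the layout-creation sketch below.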
 
-  struct Viewport
+  struct DescriptorSetLayoutCreateInfo
   {
-
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT
-    : x( x_ ), y( y_ ), width( width_ ), height( height_ ), minDepth( minDepth_ ), maxDepth( maxDepth_ )
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), bindingCount( bindingCount_ ), pBindings( pBindings_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
-      : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) )
+    DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetLayoutCreateInfo( *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs ) )
     {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ )
+    : flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      x = x_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>( &rhs );
       return *this;
     }
 
-    Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      y = y_;
+      pNext = pNext_;
       return *this;
     }
 
-    Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      width = width_;
+      flags = flags_;
       return *this;
     }
 
-    Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      height = height_;
+      bindingCount = bindingCount_;
       return *this;
     }
 
-    Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ ) VULKAN_HPP_NOEXCEPT
     {
-      minDepth = minDepth_;
+      pBindings = pBindings_;
       return *this;
     }
 
-    Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutCreateInfo & setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
     {
-      maxDepth = maxDepth_;
+      bindingCount = static_cast<uint32_t>( bindings_.size() );
+      pBindings = bindings_.data();
       return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkViewport const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorSetLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkViewport*>( this );
+      return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
     }
 
-    operator VkViewport &() VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkViewport*>( this );
+      return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Viewport const& ) const = default;
+    auto operator<=>( DescriptorSetLayoutCreateInfo const& ) const = default;
 #else
-    bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( x == rhs.x )
-          && ( y == rhs.y )
-          && ( width == rhs.width )
-          && ( height == rhs.height )
-          && ( minDepth == rhs.minDepth )
-          && ( maxDepth == rhs.maxDepth );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindings == rhs.pBindings );
     }
 
-    bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -34231,90 +34648,175 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    float x = {};
-    float y = {};
-    float width = {};
-    float height = {};
-    float minDepth = {};
-    float maxDepth = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
+    uint32_t bindingCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings = {};
 
   };
-  static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
-  struct ShadingRatePaletteNV
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
   {
+    using Type = DescriptorSetLayoutCreateInfo;
+  };
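Usage sketch (not part of the generated header; assumes a valid vk::Device `device`, the `uboBinding` and `flagsInfo` objects from the sketches above, and the default exception-based error handling):

    // Plain constructor form: flags, bindingCount, pBindings.
    vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, 1, &uboBinding );
    layoutInfo.setPNext( &flagsInfo );   // optional per-binding flags, one entry per binding

    vk::UniqueDescriptorSetLayout setLayout = device.createDescriptorSetLayoutUnique( layoutInfo );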
 
+  struct DescriptorSetLayoutSupport
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT
-    : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ), pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}) VULKAN_HPP_NOEXCEPT
+    : supported( supported_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
+    DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkDescriptorSetLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
+    }
+
+    operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayoutSupport const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supported == rhs.supported );
+    }
+
+    bool operator!=( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supported = {};
+
+  };
+  static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
+  {
+    using Type = DescriptorSetLayoutSupport;
+  };
+  using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
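Usage sketch (not part of the generated header; `device` and `layoutInfo` as in the sketches above): this is an output structure of vkGetDescriptorSetLayoutSupport, which the enhanced-mode wrapper returns by value.

    vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutInfo );
    if ( !support.supported )
    {
      // fall back to a smaller or simpler layout
    }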
+
+  struct DescriptorSetVariableDescriptorCountAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(uint32_t descriptorSetCount_ = {}, const uint32_t* pDescriptorCounts_ = {}) VULKAN_HPP_NOEXCEPT
+    : descriptorSetCount( descriptorSetCount_ ), pDescriptorCounts( pDescriptorCounts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
     {}
 
 #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
-    : shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
+    DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ )
+    : descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
     {}
 #endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
       return *this;
     }
 
-    ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
+      pNext = pNext_;
       return *this;
     }
 
-    ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorCounts = pDescriptorCounts_;
       return *this;
     }
 
 #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-    ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
     {
-      shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
-      pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
+      descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
+      pDescriptorCounts = descriptorCounts_.data();
       return *this;
     }
 #endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorSetVariableDescriptorCountAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
+      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
     }
 
-    operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
+      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ShadingRatePaletteNV const& ) const = default;
+    auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const& ) const = default;
 #else
-    bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
-          && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pDescriptorCounts == rhs.pDescriptorCounts );
     }
 
-    bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -34323,71 +34825,70 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    uint32_t shadingRatePaletteEntryCount = {};
-    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+    const void* pNext = {};
+    uint32_t descriptorSetCount = {};
+    const uint32_t* pDescriptorCounts = {};
 
   };
-  static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountAllocateInfo>::value, "struct wrapper is not a standard layout!" );
 
-  struct ViewportWScalingNV
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
   {
+    using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
+  };
+  using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
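Usage sketch (not part of the generated header; assumes valid `vk::Device device`, `vk::DescriptorPool descriptorPool` and a plain `vk::DescriptorSetLayout layout` whose last binding is variable-sized): one count is supplied per descriptor set being allocated.

    std::array<uint32_t, 1> variableCounts{ 128 };
    vk::DescriptorSetVariableDescriptorCountAllocateInfo countInfo( variableCounts );

    vk::DescriptorSetAllocateInfo allocInfo( descriptorPool, 1, &layout );
    allocInfo.setPNext( &countInfo );
    std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );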
 
+  struct DescriptorSetVariableDescriptorCountLayoutSupport
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT
-    : xcoeff( xcoeff_ ), ycoeff( ycoeff_ )
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(uint32_t maxVariableDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
-      : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
+    DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
-      return *this;
-    }
-
-    ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
-    {
-      xcoeff = xcoeff_;
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      ycoeff = ycoeff_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
       return *this;
     }
 
 
-    operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorSetVariableDescriptorCountLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkViewportWScalingNV*>( this );
+      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
     }
 
-    operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkViewportWScalingNV*>( this );
+      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ViewportWScalingNV const& ) const = default;
+    auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const& ) const = default;
 #else
-    bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( xcoeff == rhs.xcoeff )
-          && ( ycoeff == rhs.ycoeff );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
     }
 
-    bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -34396,78 +34897,107 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    float xcoeff = {};
-    float ycoeff = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
+    void* pNext = {};
+    uint32_t maxVariableDescriptorCount = {};
 
   };
-  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupport>::value, "struct wrapper is not a standard layout!" );
 
-  struct StridedDeviceAddressRegionKHR
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
+  {
+    using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
+  };
+  using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
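Usage sketch (not part of the generated header; `device` and `layoutInfo` as above): when chained behind DescriptorSetLayoutSupport, this structure reports the largest variable descriptor count the layout can support; the StructureChain overload fills both structures in one call.

    auto chain = device.getDescriptorSetLayoutSupport<
      vk::DescriptorSetLayoutSupport, vk::DescriptorSetVariableDescriptorCountLayoutSupport>( layoutInfo );
    uint32_t maxVariable =
      chain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;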
+
+  struct DescriptorUpdateTemplateEntry
   {
 
 
 #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
-    : deviceAddress( deviceAddress_ ), stride( stride_ ), size( size_ )
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {}) VULKAN_HPP_NOEXCEPT
+    : dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), offset( offset_ ), stride( stride_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-      : StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
+    DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorUpdateTemplateEntry( *reinterpret_cast<DescriptorUpdateTemplateEntry const *>( &rhs ) )
     {}
 #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>( &rhs );
       return *this;
     }
 
-    StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
     {
-      deviceAddress = deviceAddress_;
+      dstBinding = dstBinding_;
       return *this;
     }
 
-    StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
     {
-      stride = stride_;
+      dstArrayElement = dstArrayElement_;
       return *this;
     }
 
-    StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      size = size_;
+      descriptorCount = descriptorCount_;
       return *this;
     }
 
+    DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
 
-    operator VkStridedDeviceAddressRegionKHR const&() const VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR*>( this );
+      offset = offset_;
+      return *this;
     }
 
-    operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkStridedDeviceAddressRegionKHR*>( this );
+      stride = stride_;
+      return *this;
+    }
+
+
+    operator VkDescriptorUpdateTemplateEntry const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
+    }
+
+    operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
     }
 
 
 #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( StridedDeviceAddressRegionKHR const& ) const = default;
+    auto operator<=>( DescriptorUpdateTemplateEntry const& ) const = default;
 #else
-    bool operator==( StridedDeviceAddressRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( deviceAddress == rhs.deviceAddress )
-          && ( stride == rhs.stride )
-          && ( size == rhs.size );
+      return ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( offset == rhs.offset )
+          && ( stride == rhs.stride );
     }
 
-    bool operator!=( StridedDeviceAddressRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -34476,901 +35006,1669 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   public:
-    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    size_t offset = {};
+    size_t stride = {};
 
   };
-  static_assert( sizeof( StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<StridedDeviceAddressRegionKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
+  using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
 
-  class CommandBuffer
+  struct DescriptorUpdateTemplateCreateInfo
   {
-  public:
-    using CType = VkCommandBuffer;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo;
 
-  public:
-    VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT
-      : m_commandBuffer(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {}, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ), pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
     {}
 
-    VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_commandBuffer(VK_NULL_HANDLE)
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast<DescriptorUpdateTemplateCreateInfo const *>( &rhs ) )
     {}
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
-      : m_commandBuffer( commandBuffer )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {} )
+    : flags( flags_ ), descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) ), pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
     {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      m_commandBuffer = commandBuffer;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>( &rhs );
       return *this;
     }
-#endif
 
-    CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      m_commandBuffer = VK_NULL_HANDLE;
+      pNext = pNext_;
       return *this;
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( CommandBuffer const& ) const = default;
-#else
-    bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_commandBuffer == rhs.m_commandBuffer;
+      flags = flags_;
+      return *this;
     }
 
-    bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_commandBuffer != rhs.m_commandBuffer;
+      descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
+      return *this;
     }
 
-    bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
     {
-      return m_commandBuffer < rhs.m_commandBuffer;
+      pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
+      return *this;
     }
-#endif
 
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
+      pDescriptorUpdateEntries = descriptorUpdateEntries_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
+    DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      templateType = templateType_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetLayout = descriptorSetLayout_;
+      return *this;
+    }
 
+    DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineLayout = pipelineLayout_;
+      return *this;
+    }
 
+    DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
+    {
+      set = set_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
+    operator VkDescriptorUpdateTemplateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
+    }
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorUpdateTemplateCreateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
+          && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
+          && ( templateType == rhs.templateType )
+          && ( descriptorSetLayout == rhs.descriptorSetLayout )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( set == rhs.set );
+    }
 
+    bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {};
+    uint32_t descriptorUpdateEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries = {};
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
+    uint32_t set = {};
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  };
+  static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
+  {
+    using Type = DescriptorUpdateTemplateCreateInfo;
+  };
+  using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
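Usage sketch (not part of the generated header; `device`, `layout`, `descriptorSet` and `buffer` are assumed valid handles): each template entry describes where a descriptor's data lives inside a host memory blob, and the template then updates a set from that blob without building VkWriteDescriptorSet arrays.

    // Entry: binding 0, array element 0, one uniform buffer, read from offset 0 of the blob.
    vk::DescriptorUpdateTemplateEntry entry(
      0, 0, 1, vk::DescriptorType::eUniformBuffer, 0, sizeof( vk::DescriptorBufferInfo ) );

    vk::DescriptorUpdateTemplateCreateInfo templateInfo(
      {}, 1, &entry, vk::DescriptorUpdateTemplateType::eDescriptorSet, layout );

    vk::UniqueDescriptorUpdateTemplate updateTemplate =
      device.createDescriptorUpdateTemplateUnique( templateInfo );

    vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );
    device.updateDescriptorSetWithTemplate( descriptorSet, *updateTemplate, &bufferInfo );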
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  struct DeviceQueueCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo;
 
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueCount_ = {}, const float* pQueuePriorities_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( queueCount_ ), pQueuePriorities( pQueuePriorities_ )
+    {}
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
+    DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceQueueCreateInfo( *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs ) )
+    {}
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ )
+    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( static_cast<uint32_t>( queuePriorities_.size() ) ), pQueuePriorities( queuePriorities_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>( &rhs );
+      return *this;
+    }
 
+    DeviceQueueCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
 
+    DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCount = queueCount_;
+      return *this;
+    }
 
+    DeviceQueueCreateInfo & setPQueuePriorities( const float* pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueuePriorities = pQueuePriorities_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCount = static_cast<uint32_t>( queuePriorities_.size() );
+      pQueuePriorities = queuePriorities_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
+    }
 
+    operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceQueueCreateInfo const& ) const = default;
+#else
+    bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueCount == rhs.queueCount )
+          && ( pQueuePriorities == rhs.pQueuePriorities );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    bool operator!=( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+    uint32_t queueCount = {};
+    const float* pQueuePriorities = {};
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void buildAccelerationStructuresIndirectKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::DeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const * ppMaxPrimitiveCounts, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void buildAccelerationStructuresIndirectKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, ArrayProxy<const uint32_t> const & indirectStrides, ArrayProxy<const uint32_t* const > const & pMaxPrimitiveCounts, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  };
+  static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
+  {
+    using Type = DeviceQueueCreateInfo;
+  };
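
For orientation, a minimal usage sketch of the wrapper added above (illustrative only, not part of the diff; it assumes the default `vk` namespace and a caller-supplied queue family index). The enhanced-mode constructor fills queueCount and pQueuePriorities from the ArrayProxyNoTemporaries argument:

    #include <array>
    #include <vulkan/vulkan.hpp>

    vk::DeviceQueueCreateInfo makeQueueInfo( uint32_t queueFamilyIndex )
    {
      // The priorities must outlive the struct: only the pointer is stored.
      static constexpr std::array<float, 1> priorities{ 1.0f };
      // queueCount / pQueuePriorities are derived from priorities.size() / priorities.data().
      return vk::DeviceQueueCreateInfo( {}, queueFamilyIndex, priorities );
    }
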
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void buildAccelerationStructuresKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const * ppBuildRangeInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void buildAccelerationStructuresKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const > const & pBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  struct PhysicalDeviceFeatures
+  {
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {}) VULKAN_HPP_NOEXCEPT
+    : robustBufferAccess( robustBufferAccess_ ), fullDrawIndexUint32( fullDrawIndexUint32_ ), imageCubeArray( imageCubeArray_ ), independentBlend( independentBlend_ ), geometryShader( geometryShader_ ), tessellationShader( tessellationShader_ ), sampleRateShading( sampleRateShading_ ), dualSrcBlend( dualSrcBlend_ ), logicOp( logicOp_ ), multiDrawIndirect( multiDrawIndirect_ ), drawIndirectFirstInstance( drawIndirectFirstInstance_ ), depthClamp( depthClamp_ ), depthBiasClamp( depthBiasClamp_ ), fillModeNonSolid( fillModeNonSolid_ ), depthBounds( depthBounds_ ), wideLines( wideLines_ ), largePoints( largePoints_ ), alphaToOne( alphaToOne_ ), multiViewport( multiViewport_ ), samplerAnisotropy( samplerAnisotropy_ ), textureCompressionETC2( textureCompressionETC2_ ), textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ), textureCompressionBC( textureCompressionBC_ ), occlusionQueryPrecise( occlusionQueryPrecise_ ), pipelineStatisticsQuery( pipelineStatisticsQuery_ ), vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ), fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ), shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ), shaderImageGatherExtended( shaderImageGatherExtended_ ), shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ), shaderStorageImageMultisample( shaderStorageImageMultisample_ ), shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ), shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ), shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ), shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ), shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ), shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ), shaderClipDistance( shaderClipDistance_ ), shaderCullDistance( shaderCullDistance_ ), shaderFloat64( shaderFloat64_ ), shaderInt64( shaderInt64_ ), shaderInt16( shaderInt16_ ), shaderResourceResidency( shaderResourceResidency_ ), shaderResourceMinLod( shaderResourceMinLod_ ), sparseBinding( sparseBinding_ ), sparseResidencyBuffer( sparseResidencyBuffer_ ), sparseResidencyImage2D( sparseResidencyImage2D_ ), sparseResidencyImage3D( sparseResidencyImage3D_ ), sparseResidency2Samples( sparseResidency2Samples_ ), sparseResidency4Samples( sparseResidency4Samples_ ), sparseResidency8Samples( sparseResidency8Samples_ ), sparseResidency16Samples( sparseResidency16Samples_ ), sparseResidencyAliased( sparseResidencyAliased_ ), variableMultisampleRate( variableMultisampleRate_ ), inheritedQueries( inheritedQueries_ )
+    {}
 
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFeatures( *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>( &rhs );
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustBufferAccess = robustBufferAccess_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullDrawIndexUint32 = fullDrawIndexUint32_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageCubeArray = imageCubeArray_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
+    {
+      independentBlend = independentBlend_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryShader = geometryShader_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationShader = tessellationShader_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleRateShading = sampleRateShading_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dualSrcBlend = dualSrcBlend_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiDrawIndirect = multiDrawIndirect_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drawIndirectFirstInstance = drawIndirectFirstInstance_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClamp = depthClamp_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fillModeNonSolid = fillModeNonSolid_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBounds = depthBounds_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      wideLines = wideLines_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      largePoints = largePoints_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaToOne = alphaToOne_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiViewport = multiViewport_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerAnisotropy = samplerAnisotropy_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionETC2 = textureCompressionETC2_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionBC = textureCompressionBC_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
+    {
+      occlusionQueryPrecise = occlusionQueryPrecise_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatisticsQuery = pipelineStatisticsQuery_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
+      return *this;
+    }
 
+    PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageGatherExtended = shaderImageGatherExtended_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void debugMarkerEndEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageMultisample = shaderStorageImageMultisample_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderClipDistance = shaderClipDistance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderCullDistance = shaderCullDistance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat64 = shaderFloat64_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt64 = shaderInt64_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt16 = shaderInt16_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderResourceResidency = shaderResourceResidency_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderResourceMinLod = shaderResourceMinLod_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseBinding = sparseBinding_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyBuffer = sparseResidencyBuffer_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyImage2D = sparseResidencyImage2D_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyImage3D = sparseResidencyImage3D_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency2Samples = sparseResidency2Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency4Samples = sparseResidency4Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency8Samples = sparseResidency8Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency16Samples = sparseResidency16Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyAliased = sparseResidencyAliased_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variableMultisampleRate = variableMultisampleRate_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedQueries = inheritedQueries_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( robustBufferAccess == rhs.robustBufferAccess )
+          && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
+          && ( imageCubeArray == rhs.imageCubeArray )
+          && ( independentBlend == rhs.independentBlend )
+          && ( geometryShader == rhs.geometryShader )
+          && ( tessellationShader == rhs.tessellationShader )
+          && ( sampleRateShading == rhs.sampleRateShading )
+          && ( dualSrcBlend == rhs.dualSrcBlend )
+          && ( logicOp == rhs.logicOp )
+          && ( multiDrawIndirect == rhs.multiDrawIndirect )
+          && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
+          && ( depthClamp == rhs.depthClamp )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( fillModeNonSolid == rhs.fillModeNonSolid )
+          && ( depthBounds == rhs.depthBounds )
+          && ( wideLines == rhs.wideLines )
+          && ( largePoints == rhs.largePoints )
+          && ( alphaToOne == rhs.alphaToOne )
+          && ( multiViewport == rhs.multiViewport )
+          && ( samplerAnisotropy == rhs.samplerAnisotropy )
+          && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
+          && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
+          && ( textureCompressionBC == rhs.textureCompressionBC )
+          && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
+          && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
+          && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
+          && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
+          && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
+          && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
+          && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
+          && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
+          && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
+          && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
+          && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
+          && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
+          && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
+          && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
+          && ( shaderClipDistance == rhs.shaderClipDistance )
+          && ( shaderCullDistance == rhs.shaderCullDistance )
+          && ( shaderFloat64 == rhs.shaderFloat64 )
+          && ( shaderInt64 == rhs.shaderInt64 )
+          && ( shaderInt16 == rhs.shaderInt16 )
+          && ( shaderResourceResidency == rhs.shaderResourceResidency )
+          && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
+          && ( sparseBinding == rhs.sparseBinding )
+          && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
+          && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
+          && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
+          && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
+          && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
+          && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
+          && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
+          && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
+          && ( variableMultisampleRate == rhs.variableMultisampleRate )
+          && ( inheritedQueries == rhs.inheritedQueries );
+    }
+
+    bool operator!=( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {};
+    VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {};
+    VULKAN_HPP_NAMESPACE::Bool32 logicOp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {};
+    VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {};
+    VULKAN_HPP_NAMESPACE::Bool32 wideLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 largePoints = {};
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {};
+    VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
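
As a quick illustration (not part of the diff; the chosen features are just an example): because every setter above returns *this, a feature request can be assembled by chaining, starting from the all-VK_FALSE defaults:

    #include <vulkan/vulkan.hpp>

    vk::PhysicalDeviceFeatures makeRequestedFeatures()
    {
      // Enable only what the application actually needs; everything else stays VK_FALSE.
      return vk::PhysicalDeviceFeatures{}
          .setSamplerAnisotropy( VK_TRUE )
          .setFillModeNonSolid( VK_TRUE )
          .setGeometryShader( VK_TRUE );
    }
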
 
+  struct DeviceCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char* const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char* const * ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), queueCreateInfoCount( queueCreateInfoCount_ ), pQueueCreateInfos( pQueueCreateInfos_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ ), pEnabledFeatures( pEnabledFeatures_ )
+    {}
 
+    VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) )
+    {}
 
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} )
+    : flags( flags_ ), queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) ), pQueueCreateInfos( queueCreateInfos_.data() ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() ), pEnabledFeatures( pEnabledFeatures_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
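
A brief sketch of how this enhanced-mode constructor might be used (illustrative only, not part of the diff; the swapchain extension and the empty layer list are assumptions of the example). The element counts are taken from the proxies, so enabledExtensionCount cannot drift out of sync with the array:

    #include <array>
    #include <vulkan/vulkan.hpp>

    vk::Device createLogicalDevice( vk::PhysicalDevice physicalDevice,
                                    vk::DeviceQueueCreateInfo const & queueInfo )
    {
      static constexpr std::array<const char *, 1> extensions{ VK_KHR_SWAPCHAIN_EXTENSION_NAME };
      vk::PhysicalDeviceFeatures features{};  // request no optional features
      // flags, one queue create info, no layers, one extension, pointer to the feature set
      vk::DeviceCreateInfo createInfo( {}, queueInfo, {}, extensions, &features );
      // Enhanced mode returns a vk::Device directly, throwing on failure when exceptions are enabled.
      return physicalDevice.createDevice( createInfo );
    }
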
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs );
+      return *this;
+    }
 
+    DeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
 
+    DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCreateInfoCount = queueCreateInfoCount_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueCreateInfos = pQueueCreateInfos_;
+      return *this;
+    }
 
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceCreateInfo & setQueueCreateInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
+      pQueueCreateInfos = queueCreateInfos_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
 
+    DeviceCreateInfo & setPpEnabledLayerNames( const char* const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
+      ppEnabledLayerNames = pEnabledLayerNames_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
+    DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceCreateInfo & setPpEnabledExtensionNames( const char* const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
+      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEnabledFeatures = pEnabledFeatures_;
+      return *this;
+    }
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
+    }
 
+    operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceCreateInfo*>( this );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceCreateInfo const& ) const = default;
+#else
+    bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
+          && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
+          && ( pEnabledFeatures == rhs.pEnabledFeatures );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    bool operator!=( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
+    uint32_t queueCreateInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {};
+    uint32_t enabledLayerCount = {};
+    const char* const * ppEnabledLayerNames = {};
+    uint32_t enabledExtensionCount = {};
+    const char* const * ppEnabledExtensionNames = {};
+    const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {};
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  };
+  static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceCreateInfo>
+  {
+    using Type = DeviceCreateInfo;
+  };
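
    [Editor's note — illustrative sketch, not part of this commit: how the enhanced-mode ArrayProxy setters added to DeviceCreateInfo might be used. queueFamilyIndex, extensionNames and physicalDevice are assumed application-side values; vk:: aliases VULKAN_HPP_NAMESPACE.]

      float priority = 1.0f;
      vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &priority );

      vk::DeviceCreateInfo createInfo;
      createInfo.setQueueCreateInfos( queueInfo )               // fills queueCreateInfoCount and pQueueCreateInfos
                .setPEnabledExtensionNames( extensionNames );   // fills enabledExtensionCount and ppEnabledExtensionNames

      vk::Device device = physicalDevice.createDevice( createInfo );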
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  struct DeviceDeviceMemoryReportCreateInfoEXT
+  {
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
 
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
+    {}
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
+    DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endConditionalRenderingEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
+    DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endDebugUtilsLabelEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
 
+    DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnUserCallback = pfnUserCallback_;
+      return *this;
+    }
 
+    DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
+    operator VkDeviceDeviceMemoryReportCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endRenderPass( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
+    }
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceDeviceMemoryReportCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnUserCallback == rhs.pfnUserCallback )
+          && ( pUserData == rhs.pUserData );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
+    PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {};
+    void* pUserData = {};
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  };
+  static_assert( sizeof( DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceDeviceMemoryReportCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT>
+  {
+    using Type = DeviceDeviceMemoryReportCreateInfoEXT;
+  };
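
    [Editor's note — illustrative sketch, not part of this commit: chaining a memory-report callback (VK_EXT_device_memory_report) into device creation via pNext. memoryReportCallback and tracker are hypothetical application-side names; createInfo is the DeviceCreateInfo from the sketch above.]

      // Callback must match PFN_vkDeviceMemoryReportCallbackEXT.
      VKAPI_ATTR void VKAPI_CALL memoryReportCallback( const VkDeviceMemoryReportCallbackDataEXT * pCallbackData, void * pUserData );

      vk::DeviceDeviceMemoryReportCreateInfoEXT memoryReportInfo;
      memoryReportInfo.setPfnUserCallback( &memoryReportCallback )
                      .setPUserData( &tracker );   // application-owned state passed back to the callback
      createInfo.setPNext( &memoryReportInfo );    // chain before calling createDevice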
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  struct DeviceDiagnosticsConfigCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
 
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ )
+    {}
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
+    DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
+    DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
 
+    DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    operator VkDeviceDiagnosticsConfigCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
+    }
 
+    operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const& ) const = default;
+#else
+    bool operator==( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> const & values, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  };
+  static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
 
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
+  {
+    using Type = DeviceDiagnosticsConfigCreateInfoNV;
+  };
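
    [Editor's note — illustrative sketch, not part of this commit: enabling NVIDIA device diagnostics (VK_NV_device_diagnostics_config) ahead of device creation; createInfo as in the earlier sketch.]

      vk::DeviceDiagnosticsConfigCreateInfoNV diagnosticsInfo(
        vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo |
        vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking );
      createInfo.setPNext( &diagnosticsInfo );   // chain before calling createDevice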
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  struct DeviceEventInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
 
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug) VULKAN_HPP_NOEXCEPT
+    : deviceEvent( deviceEvent_ )
+    {}
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
+    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
+    DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
 
+    DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceEvent = deviceEvent_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+    operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setBlendConstants( const float blendConstants[4], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
+    }
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setCheckpointNV( const void* pCheckpointMarker, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceEventInfoEXT const& ) const = default;
+#else
+    bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceEvent == rhs.deviceEvent );
+    }
 
+    bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
 
+  };
+  static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
+  {
+    using Type = DeviceEventInfoEXT;
+  };
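
    [Editor's note — illustrative sketch, not part of this commit: registering for a display hot-plug event with VK_EXT_display_control; the returned fence signals when the event fires. device is an existing vk::Device.]

      vk::DeviceEventInfoEXT eventInfo( vk::DeviceEventTypeEXT::eDisplayHotplug );
      vk::Fence hotplugFence = device.registerEventEXT( eventInfo );   // enhanced-mode overload, throws on error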
 
+  struct DeviceGroupBindSparseInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : resourceDeviceIndex( resourceDeviceIndex_ ), memoryDeviceIndex( memoryDeviceIndex_ )
+    {}
 
+    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs );
+      return *this;
+    }
 
+    DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resourceDeviceIndex = resourceDeviceIndex_;
+      return *this;
+    }
 
+    DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryDeviceIndex = memoryDeviceIndex_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
+    operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDeviceMask( uint32_t deviceMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupBindSparseInfo const& ) const = default;
+#else
+    bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
+          && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
+    const void* pNext = {};
+    uint32_t resourceDeviceIndex = {};
+    uint32_t memoryDeviceIndex = {};
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  };
+  static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
 
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
+  {
+    using Type = DeviceGroupBindSparseInfo;
+  };
+  using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
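
    [Editor's note — illustrative sketch, not part of this commit: directing a sparse binding operation to specific devices in a device group; bindSparseInfo, queue and fence are assumed to already exist.]

      vk::DeviceGroupBindSparseInfo groupInfo( /*resourceDeviceIndex=*/0, /*memoryDeviceIndex=*/0 );
      bindSparseInfo.setPNext( &groupInfo );     // BindSparseInfo being submitted
      queue.bindSparse( bindSparseInfo, fence );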
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  struct DeviceGroupCommandBufferBeginInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;
 
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo(uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceMask( deviceMask_ )
+    {}
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D* pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setFragmentShadingRateKHR( const Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
+    DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
+    DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
 
+    DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setLineWidth( float lineWidth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 
+    operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
+    }
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupCommandBufferBeginInfo const& ) const = default;
+#else
+    bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMask == rhs.deviceMask );
+    }
 
+    bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+    const void* pNext = {};
+    uint32_t deviceMask = {};
 
+  };
+  static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
+  {
+    using Type = DeviceGroupCommandBufferBeginInfo;
+  };
+  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
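
    [Editor's note — illustrative sketch, not part of this commit: restricting command buffer execution to devices 0 and 1 of a device group; commandBuffer is an existing vk::CommandBuffer.]

      vk::DeviceGroupCommandBufferBeginInfo groupBegin( /*deviceMask=*/0x3 );
      vk::CommandBufferBeginInfo beginInfo;
      beginInfo.setPNext( &groupBegin );
      commandBuffer.begin( beginInfo );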
 
+  class DisplayKHR
+  {
+  public:
+    using CType = VkDisplayKHR;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
 
+  public:
+    VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT
+      : m_displayKHR(VK_NULL_HANDLE)
+    {}
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_displayKHR(VK_NULL_HANDLE)
+    {}
 
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
+      : m_displayKHR( displayKHR )
+    {}
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayKHR = displayKHR;
+      return *this;
+    }
+#endif
 
+    DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayKHR = VK_NULL_HANDLE;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayKHR const& ) const = default;
+#else
+    bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR == rhs.m_displayKHR;
+    }
 
+    bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR != rhs.m_displayKHR;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR < rhs.m_displayKHR;
+    }
+#endif
 
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR != VK_NULL_HANDLE;
+    }
 
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR == VK_NULL_HANDLE;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  private:
+    VkDisplayKHR m_displayKHR;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
 
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDisplayKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+  };
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+  };
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+  };
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
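
    [Editor's note — illustrative sketch, not part of this commit: a DisplayKHR handle is typically obtained by enumerating a physical device's displays (VK_KHR_display) rather than constructed directly.]

      std::vector<vk::DisplayPropertiesKHR> displays = physicalDevice.getDisplayPropertiesKHR();
      if ( !displays.empty() )
      {
        vk::DisplayKHR display = displays.front().display;
        if ( display )   // explicit operator bool: handle is not VK_NULL_HANDLE
        {
          auto modes = physicalDevice.getDisplayModePropertiesKHR( display );
        }
      }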
 
+  struct PerformanceConfigurationAcquireInfoINTEL
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated) VULKAN_HPP_NOEXCEPT
+    : type( type_ )
+    {}
 
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
+    VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
+      return *this;
+    }
 
+    PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+    }
 
+    operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type );
+    }
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  };
+  static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
 
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
+  {
+    using Type = PerformanceConfigurationAcquireInfoINTEL;
+  };
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  class PerformanceConfigurationINTEL
+  {
+  public:
+    using CType = VkPerformanceConfigurationINTEL;
 
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  public:
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT
+      : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
+    {}
 
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
+    {}
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD Result end( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
+      : m_performanceConfigurationINTEL( performanceConfigurationINTEL )
+    {}
 
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT
+    {
+      m_performanceConfigurationINTEL = performanceConfigurationINTEL;
+      return *this;
+    }
+#endif
 
+    PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_performanceConfigurationINTEL = VK_NULL_HANDLE;
+      return *this;
+    }
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceConfigurationINTEL const& ) const = default;
 #else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL;
+    }
 
+    bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL;
+    }
 
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
+    bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return m_commandBuffer;
+      return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL;
     }
 
     explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return m_commandBuffer != VK_NULL_HANDLE;
+      return m_performanceConfigurationINTEL != VK_NULL_HANDLE;
     }
 
     bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return m_commandBuffer == VK_NULL_HANDLE;
+      return m_performanceConfigurationINTEL == VK_NULL_HANDLE;
     }
 
   private:
-    VkCommandBuffer m_commandBuffer;
+    VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL;
   };
-  static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" );
 
   template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eCommandBuffer>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePerformanceConfigurationINTEL>
   {
-    using type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+    using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
   };
 
   template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL>
   {
-    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+    using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
   };
 
 
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
-  };
-
 
   template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
   {
     static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
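
Editorial note (not part of the diff): a minimal sketch of how the INTEL performance-query wrappers above are meant to be used, assuming `<vulkan/vulkan.hpp>` is included with the default `vk` namespace, a `vk::Device device` and `vk::Queue queue` already exist, and VK_INTEL_performance_query is enabled:

    // Acquire a configuration, bind it to a queue, release it when done.
    vk::PerformanceConfigurationAcquireInfoINTEL acquireInfo(
        vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated );
    vk::PerformanceConfigurationINTEL configuration =
        device.acquirePerformanceConfigurationINTEL( acquireInfo );
    queue.setPerformanceConfigurationINTEL( configuration );
    device.releasePerformanceConfigurationINTEL( configuration );
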
@@ -42930,6 +44228,275 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = SubmitInfo;
   };
 
+  struct SemaphoreSubmitInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask_ = {}, uint32_t deviceIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : semaphore( semaphore_ ), value( value_ ), stageMask( stageMask_ ), deviceIndex( deviceIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfoKHR( SemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreSubmitInfoKHR( VkSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreSubmitInfoKHR( *reinterpret_cast<SemaphoreSubmitInfoKHR const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfoKHR & operator=( SemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreSubmitInfoKHR & operator=( VkSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    SemaphoreSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreSubmitInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    SemaphoreSubmitInfoKHR & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value = value_;
+      return *this;
+    }
+
+    SemaphoreSubmitInfoKHR & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageMask = stageMask_;
+      return *this;
+    }
+
+    SemaphoreSubmitInfoKHR & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndex = deviceIndex_;
+      return *this;
+    }
+
+
+    operator VkSemaphoreSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreSubmitInfoKHR*>( this );
+    }
+
+    operator VkSemaphoreSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreSubmitInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SemaphoreSubmitInfoKHR const& ) const = default;
+#else
+    bool operator==( SemaphoreSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( value == rhs.value )
+          && ( stageMask == rhs.stageMask )
+          && ( deviceIndex == rhs.deviceIndex );
+    }
+
+    bool operator!=( SemaphoreSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    uint64_t value = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask = {};
+    uint32_t deviceIndex = {};
+
+  };
+  static_assert( sizeof( SemaphoreSubmitInfoKHR ) == sizeof( VkSemaphoreSubmitInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreSubmitInfoKHR>
+  {
+    using Type = SemaphoreSubmitInfoKHR;
+  };
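
Editorial sketch of filling one wait entry through the generated setters (the semaphore handle is assumed; `value` is left at 0 since it only matters for timeline semaphores):

    vk::SemaphoreSubmitInfoKHR waitInfo;
    waitInfo.setSemaphore( imageAvailableSemaphore )
            .setStageMask( vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput )
            .setDeviceIndex( 0 );   // value stays 0; only timeline semaphores use it
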
+
+  struct SubmitInfo2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubmitInfo2KHR(VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_ = {}, uint32_t waitSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pWaitSemaphoreInfos_ = {}, uint32_t commandBufferInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR* pCommandBufferInfos_ = {}, uint32_t signalSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pSignalSemaphoreInfos_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), waitSemaphoreInfoCount( waitSemaphoreInfoCount_ ), pWaitSemaphoreInfos( pWaitSemaphoreInfos_ ), commandBufferInfoCount( commandBufferInfoCount_ ), pCommandBufferInfos( pCommandBufferInfos_ ), signalSemaphoreInfoCount( signalSemaphoreInfoCount_ ), pSignalSemaphoreInfos( pSignalSemaphoreInfos_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubmitInfo2KHR( SubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubmitInfo2KHR( VkSubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubmitInfo2KHR( *reinterpret_cast<SubmitInfo2KHR const *>( &rhs ) )
+    {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubmitInfo2KHR( VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const & waitSemaphoreInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR> const & commandBufferInfos_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const & signalSemaphoreInfos_ = {} )
+    : flags( flags_ ), waitSemaphoreInfoCount( static_cast<uint32_t>( waitSemaphoreInfos_.size() ) ), pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ), commandBufferInfoCount( static_cast<uint32_t>( commandBufferInfos_.size() ) ), pCommandBufferInfos( commandBufferInfos_.data() ), signalSemaphoreInfoCount( static_cast<uint32_t>( signalSemaphoreInfos_.size() ) ), pSignalSemaphoreInfos( signalSemaphoreInfos_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2KHR & operator=( SubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubmitInfo2KHR & operator=( VkSubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+    SubmitInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubmitInfo2KHR & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SubmitInfo2KHR & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreInfoCount = waitSemaphoreInfoCount_;
+      return *this;
+    }
+
+    SubmitInfo2KHR & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreInfos = pWaitSemaphoreInfos_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubmitInfo2KHR & setWaitSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreInfoCount = static_cast<uint32_t>( waitSemaphoreInfos_.size() );
+      pWaitSemaphoreInfos = waitSemaphoreInfos_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubmitInfo2KHR & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferInfoCount = commandBufferInfoCount_;
+      return *this;
+    }
+
+    SubmitInfo2KHR & setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR* pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCommandBufferInfos = pCommandBufferInfos_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubmitInfo2KHR & setCommandBufferInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR> const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferInfoCount = static_cast<uint32_t>( commandBufferInfos_.size() );
+      pCommandBufferInfos = commandBufferInfos_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubmitInfo2KHR & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreInfoCount = signalSemaphoreInfoCount_;
+      return *this;
+    }
+
+    SubmitInfo2KHR & setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreInfos = pSignalSemaphoreInfos_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubmitInfo2KHR & setSignalSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreInfoCount = static_cast<uint32_t>( signalSemaphoreInfos_.size() );
+      pSignalSemaphoreInfos = signalSemaphoreInfos_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSubmitInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubmitInfo2KHR*>( this );
+    }
+
+    operator VkSubmitInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubmitInfo2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubmitInfo2KHR const& ) const = default;
+#else
+    bool operator==( SubmitInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount )
+          && ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos )
+          && ( commandBufferInfoCount == rhs.commandBufferInfoCount )
+          && ( pCommandBufferInfos == rhs.pCommandBufferInfos )
+          && ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount )
+          && ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos );
+    }
+
+    bool operator!=( SubmitInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags = {};
+    uint32_t waitSemaphoreInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pWaitSemaphoreInfos = {};
+    uint32_t commandBufferInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR* pCommandBufferInfos = {};
+    uint32_t signalSemaphoreInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pSignalSemaphoreInfos = {};
+
+  };
+  static_assert( sizeof( SubmitInfo2KHR ) == sizeof( VkSubmitInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubmitInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubmitInfo2KHR>
+  {
+    using Type = SubmitInfo2KHR;
+  };
+
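
Taken together with Queue::submit2KHR declared further down, the intended usage is roughly the following sketch (`queue`, `cmdBuffer`, the two semaphores and `fence` are assumed, and VK_KHR_synchronization2 must be enabled on the device):

    vk::SemaphoreSubmitInfoKHR waitInfo( imageAvailableSemaphore, 0,
                                         vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput, 0 );
    vk::SemaphoreSubmitInfoKHR signalInfo( renderFinishedSemaphore, 0,
                                           vk::PipelineStageFlagBits2KHR::eAllCommands, 0 );
    vk::CommandBufferSubmitInfoKHR cmdInfo( cmdBuffer, 0 );   // deviceMask 0 = no device-group restriction
    vk::SubmitInfo2KHR submitInfo( {}, waitInfo, cmdInfo, signalInfo );   // ArrayProxy constructor shown above
    queue.submit2KHR( submitInfo, fence );
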
   class Queue
   {
   public:
@@ -42986,6 +44553,16 @@ namespace VULKAN_HPP_NAMESPACE
 
 
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getCheckpointData2NV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointData2NV* pCheckpointData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = CheckpointData2NVAllocator, typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
     void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
     template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -43049,6 +44626,14 @@ namespace VULKAN_HPP_NAMESPACE
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result submit2KHR( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR> const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
     VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
@@ -46989,14 +48574,14 @@ namespace VULKAN_HPP_NAMESPACE
     template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
@@ -47104,14 +48689,14 @@ namespace VULKAN_HPP_NAMESPACE
     template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
@@ -47208,14 +48793,14 @@ namespace VULKAN_HPP_NAMESPACE
     template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
@@ -47228,14 +48813,14 @@ namespace VULKAN_HPP_NAMESPACE
     template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
     VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
@@ -47342,14 +48927,14 @@ namespace VULKAN_HPP_NAMESPACE
     template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type = 0>
     VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>>
     VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>, typename B = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type = 0>
     VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
@@ -47936,7 +49521,7 @@ namespace VULKAN_HPP_NAMESPACE
     void getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR* pSizeInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const AccelerationStructureBuildGeometryInfoKHR & buildInfo, ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const AccelerationStructureBuildGeometryInfoKHR & buildInfo, ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
@@ -69809,6 +71394,88 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = PhysicalDeviceSubgroupSizeControlPropertiesEXT;
   };
 
+  struct PhysicalDeviceSynchronization2FeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2FeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}) VULKAN_HPP_NOEXCEPT
+    : synchronization2( synchronization2_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2FeaturesKHR( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSynchronization2FeaturesKHR( VkPhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSynchronization2FeaturesKHR( *reinterpret_cast<PhysicalDeviceSynchronization2FeaturesKHR const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2FeaturesKHR & operator=( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSynchronization2FeaturesKHR & operator=( VkPhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2FeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSynchronization2FeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSynchronization2FeaturesKHR & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      synchronization2 = synchronization2_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSynchronization2FeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSynchronization2FeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceSynchronization2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSynchronization2FeaturesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSynchronization2FeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSynchronization2FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( synchronization2 == rhs.synchronization2 );
+    }
+
+    bool operator!=( PhysicalDeviceSynchronization2FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2FeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceSynchronization2FeaturesKHR ) == sizeof( VkPhysicalDeviceSynchronization2FeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSynchronization2FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2FeaturesKHR>
+  {
+    using Type = PhysicalDeviceSynchronization2FeaturesKHR;
+  };
+
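
As with other feature structs, this one is chained into the device-creation pNext chain; a sketch, with the queue-create infos and the rest of the create info assumed to be filled in as usual:

    vk::PhysicalDeviceSynchronization2FeaturesKHR sync2Features( VK_TRUE );
    vk::DeviceCreateInfo deviceCreateInfo;        // queue-create infos etc. set up as usual
    deviceCreateInfo.setPNext( &sync2Features );  // request the synchronization2 feature
    // "VK_KHR_synchronization2" must also be listed in ppEnabledExtensionNames.
    vk::Device device = physicalDevice.createDevice( deviceCreateInfo );
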
   struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
   {
     static const bool allowDuplicate = false;
@@ -75278,6 +76945,76 @@ namespace VULKAN_HPP_NAMESPACE
   };
   using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
 
+  struct QueueFamilyCheckpointProperties2NV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2Nv;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyCheckpointProperties2NV( *reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>( &rhs ) )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkQueueFamilyCheckpointProperties2NV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV*>( this );
+    }
+
+    operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyCheckpointProperties2NV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueueFamilyCheckpointProperties2NV const& ) const = default;
+#else
+    bool operator==( QueueFamilyCheckpointProperties2NV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+    }
+
+    bool operator!=( QueueFamilyCheckpointProperties2NV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2Nv;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask = {};
+
+  };
+  static_assert( sizeof( QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyCheckpointProperties2NV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2Nv>
+  {
+    using Type = QueueFamilyCheckpointProperties2NV;
+  };
+
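
A sketch of how this extension struct is read back through a structure chain on getQueueFamilyProperties2, and how it pairs with Queue::getCheckpointData2NV declared earlier; the exact template spelling is an assumption, so treat it as illustrative only:

    auto chains = physicalDevice.getQueueFamilyProperties2<
        vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyCheckpointProperties2NV>>();
    vk::PipelineStageFlags2KHR checkpointStages =
        chains[0].get<vk::QueueFamilyCheckpointProperties2NV>().checkpointExecutionStageMask;
    // After a device-lost error, the per-queue checkpoints can then be retrieved:
    auto checkpoints = queue.getCheckpointData2NV();
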
   struct QueueFamilyCheckpointPropertiesNV
   {
     static const bool allowDuplicate = false;
@@ -81047,6 +82784,21 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
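For orientation, an editorial sketch (not part of the generated header) of how these overloads might be recorded: both forward to the same vkCmdPipelineBarrier2KHR entry point, the pointer form unconditionally and the reference form only when enhanced mode is enabled. The command buffer, the populated dependency info, and the default dispatcher are assumptions here.

    vk::DependencyInfoKHR dependencyInfo;                    // barrier arrays filled in by the caller
    commandBuffer.pipelineBarrier2KHR( &dependencyInfo );    // pointer overload
    commandBuffer.pipelineBarrier2KHR( dependencyInfo );     // enhanced-mode reference overload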
+
+
+  template <typename Dispatch>
   VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
@@ -81106,6 +82858,13 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2KHR>( stageMask ) );
+  }
+
+
+  template <typename Dispatch>
   VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
@@ -81256,6 +83015,21 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const DependencyInfoKHR & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
   VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
@@ -81583,6 +83357,30 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWaitEvents2KHR( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfos ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
+#else
+    if ( events.size() != dependencyInfos.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdWaitEvents2KHR( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), reinterpret_cast<const VkDependencyInfoKHR *>( dependencyInfos.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
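A brief editorial sketch of the enhanced-mode overload above: each event is paired with the dependency info at the same index, so the two ArrayProxy arguments must be the same size. A single event/dependency pair can be passed directly, assuming ArrayProxy's single-element constructor and the default dispatcher; the names below are illustrative.

    vk::Event event;                         // assumed set earlier with setEvent2KHR
    vk::DependencyInfoKHR dependencyInfo;    // assumed to repeat the scopes used when the event was set
    commandBuffer.waitEvents2KHR( event, dependencyInfo );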
+
+
+  template <typename Dispatch>
   VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
@@ -81613,6 +83411,13 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteBufferMarker2AMD( m_commandBuffer, static_cast<VkPipelineStageFlags2KHR>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+  }
+
+
+  template <typename Dispatch>
   VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
@@ -81626,6 +83431,13 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
 
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2KHR>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
@@ -82277,7 +84089,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
     Pipeline pipeline;
     Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82322,7 +84134,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
     Pipeline pipeline;
     Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82580,7 +84392,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
     Pipeline pipeline;
     Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82625,7 +84437,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
     Pipeline pipeline;
     Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82856,7 +84668,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
     Pipeline pipeline;
     Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82901,7 +84713,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
     Pipeline pipeline;
     Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82936,7 +84748,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
     Pipeline pipeline;
     Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82981,7 +84793,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
     Pipeline pipeline;
     Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -83238,7 +85050,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
     SwapchainKHR swapchain;
     Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
@@ -83283,7 +85095,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
     SwapchainKHR swapchain;
     Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
@@ -89879,6 +91691,39 @@ namespace VULKAN_HPP_NAMESPACE
 
 
   template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointData2NV* pCheckpointData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast< VkCheckpointData2NV *>( pCheckpointData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CheckpointData2NVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator> Queue::getCheckpointData2NV( Dispatch const & d ) const
+  {
+    std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    return checkpointData;
+  }
+
+  template <typename CheckpointData2NVAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator> Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
+  {
+    std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    return checkpointData;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
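A hedged sketch of the enumeration wrapper above, assuming the declaration defaults the allocator and dispatcher as the other vector-returning wrappers do, and that both VK_NV_device_diagnostic_checkpoints and VK_KHR_synchronization2 are enabled on the device:

    // Typically read after a VK_ERROR_DEVICE_LOST to see which checkpoints the queue reached.
    std::vector<vk::CheckpointData2NV> checkpoints = queue.getCheckpointData2NV();
    for ( auto const & cp : checkpoints )
    {
      // cp.stage and cp.pCheckpointMarker identify the last marker executed per stage.
    }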
+
+
+  template <typename Dispatch>
   VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast< VkCheckpointDataNV *>( pCheckpointData ) );
@@ -90013,6 +91858,22 @@ namespace VULKAN_HPP_NAMESPACE
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2KHR *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2KHR *>( submits.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
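An editorial sketch of a minimal submission through the new overload; the wrapper struct members are assumed to mirror VkSubmitInfo2KHR, and queue, commandBuffer and fence are assumed to exist (fence may be a null handle):

    vk::CommandBufferSubmitInfoKHR commandBufferInfo;
    commandBufferInfo.commandBuffer = commandBuffer;       // a fully recorded command buffer
    vk::SubmitInfo2KHR submitInfo;
    submitInfo.commandBufferInfoCount = 1;
    submitInfo.pCommandBufferInfos    = &commandBufferInfo;
    queue.submit2KHR( submitInfo, fence );                 // single-element ArrayProxy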
+
+
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
@@ -90124,6 +91985,7 @@ namespace VULKAN_HPP_NAMESPACE
   template <> struct StructExtends<ImportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
   template <> struct StructExtends<MemoryAllocateFlagsInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryBarrier2KHR, SubpassDependency2>{ enum { value = true }; };
   template <> struct StructExtends<MemoryDedicatedAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
   template <> struct StructExtends<MemoryDedicatedRequirements, MemoryRequirements2>{ enum { value = true }; };
   template <> struct StructExtends<MemoryOpaqueCaptureAddressAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
@@ -90131,6 +91993,7 @@ namespace VULKAN_HPP_NAMESPACE
   template <> struct StructExtends<MutableDescriptorTypeCreateInfoVALVE, DescriptorSetLayoutCreateInfo>{ enum { value = true }; };
   template <> struct StructExtends<MutableDescriptorTypeCreateInfoVALVE, DescriptorPoolCreateInfo>{ enum { value = true }; };
   template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo2KHR>{ enum { value = true }; };
   template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
   template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
   template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
@@ -90312,6 +92175,8 @@ namespace VULKAN_HPP_NAMESPACE
   template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
   template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
   template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSynchronization2FeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSynchronization2FeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
   template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
   template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
   template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
@@ -90380,6 +92245,7 @@ namespace VULKAN_HPP_NAMESPACE
   template <> struct StructExtends<ProtectedSubmitInfo, SubmitInfo>{ enum { value = true }; };
   template <> struct StructExtends<QueryPoolPerformanceCreateInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
   template <> struct StructExtends<QueryPoolPerformanceQueryCreateInfoINTEL, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<QueueFamilyCheckpointProperties2NV, QueueFamilyProperties2>{ enum { value = true }; };
   template <> struct StructExtends<QueueFamilyCheckpointPropertiesNV, QueueFamilyProperties2>{ enum { value = true }; };
   template <> struct StructExtends<RenderPassAttachmentBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
   template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo>{ enum { value = true }; };
@@ -90389,6 +92255,7 @@ namespace VULKAN_HPP_NAMESPACE
   template <> struct StructExtends<RenderPassSampleLocationsBeginInfoEXT, RenderPassBeginInfo>{ enum { value = true }; };
   template <> struct StructExtends<RenderPassTransformBeginInfoQCOM, RenderPassBeginInfo>{ enum { value = true }; };
   template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier>{ enum { value = true }; };
+  template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier2KHR>{ enum { value = true }; };
   template <> struct StructExtends<SamplerCustomBorderColorCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
   template <> struct StructExtends<SamplerReductionModeCreateInfo, SamplerCreateInfo>{ enum { value = true }; };
   template <> struct StructExtends<SamplerYcbcrConversionImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
@@ -90420,9 +92287,11 @@ namespace VULKAN_HPP_NAMESPACE
   template <> struct StructExtends<ValidationFlagsEXT, InstanceCreateInfo>{ enum { value = true }; };
 #ifdef VK_USE_PLATFORM_WIN32_KHR
   template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo2KHR>{ enum { value = true }; };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 #ifdef VK_USE_PLATFORM_WIN32_KHR
   template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo2KHR>{ enum { value = true }; };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
   template <> struct StructExtends<WriteDescriptorSetAccelerationStructureKHR, WriteDescriptorSet>{ enum { value = true }; };
   template <> struct StructExtends<WriteDescriptorSetAccelerationStructureNV, WriteDescriptorSet>{ enum { value = true }; };
@@ -90631,11 +92500,13 @@ namespace VULKAN_HPP_NAMESPACE
     PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
     PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
     PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
+    PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0;
     PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
     PFN_vkCmdPushConstants vkCmdPushConstants = 0;
     PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
     PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
     PFN_vkCmdResetEvent vkCmdResetEvent = 0;
+    PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0;
     PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
     PFN_vkCmdResolveImage vkCmdResolveImage = 0;
     PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
@@ -90653,6 +92524,7 @@ namespace VULKAN_HPP_NAMESPACE
     PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
     PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
     PFN_vkCmdSetEvent vkCmdSetEvent = 0;
+    PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0;
     PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
     PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0;
     PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
@@ -90681,10 +92553,13 @@ namespace VULKAN_HPP_NAMESPACE
     PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
     PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
     PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
+    PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0;
     PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
     PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
+    PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0;
     PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
     PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
+    PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0;
     PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
     PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0;
     PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0;
@@ -90966,6 +92841,7 @@ namespace VULKAN_HPP_NAMESPACE
     PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
     PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
     PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
+    PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0;
     PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
 #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
     PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
@@ -91010,6 +92886,7 @@ namespace VULKAN_HPP_NAMESPACE
     PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
     PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
     PFN_vkQueueSubmit vkQueueSubmit = 0;
+    PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0;
     PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
     PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
     PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
@@ -91361,11 +93238,13 @@ namespace VULKAN_HPP_NAMESPACE
       vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
       if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
       vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
+      vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) );
       vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
       vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
       vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
       vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
       vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
+      vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) );
       vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
       vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
       vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) );
@@ -91384,6 +93263,7 @@ namespace VULKAN_HPP_NAMESPACE
       if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
       vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
       vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
+      vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) );
       vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
       vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) );
       vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
@@ -91412,10 +93292,13 @@ namespace VULKAN_HPP_NAMESPACE
       vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
       vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
       vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
+      vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) );
       vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
       vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) );
       vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
       vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
+      vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) );
       vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
       vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
       vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
@@ -91583,6 +93466,7 @@ namespace VULKAN_HPP_NAMESPACE
       vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
       vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
       vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
+      vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) );
       vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
       vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
       vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
@@ -91623,6 +93507,7 @@ namespace VULKAN_HPP_NAMESPACE
       vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
       vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
       vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
+      vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) );
       vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
       vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
       vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
@@ -91770,11 +93655,13 @@ namespace VULKAN_HPP_NAMESPACE
       vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
       if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
       vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
+      vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
       vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
       vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
       vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
       vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
       vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
+      vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) );
       vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
       vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
       vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
@@ -91793,6 +93680,7 @@ namespace VULKAN_HPP_NAMESPACE
       if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
       vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
       vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
+      vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
       vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
       vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
       vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
@@ -91821,10 +93709,13 @@ namespace VULKAN_HPP_NAMESPACE
       vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
       vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
       vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+      vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
       vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
       vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
       vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
       vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+      vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
       vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
       vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
       vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
@@ -91992,6 +93883,7 @@ namespace VULKAN_HPP_NAMESPACE
       vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
       vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
       vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+      vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
       vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
       vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
       vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
@@ -92032,6 +93924,7 @@ namespace VULKAN_HPP_NAMESPACE
       vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
       vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
       vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+      vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
       vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
       vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
       vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
index 9ba5731..1bbdab8 100644 (file)
@@ -43,7 +43,7 @@ extern "C" {
 #define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0
 
 // Version of this file
-#define VK_HEADER_VERSION 169
+#define VK_HEADER_VERSION 170
 
 // Complete version of this file
 #define VK_HEADER_VERSION_COMPLETE VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION)
@@ -600,6 +600,16 @@ typedef enum VkStructureType {
     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = 1000297000,
     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000,
     VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001,
+    VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR = 1000314000,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR = 1000314001,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR = 1000314002,
+    VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR = 1000314003,
+    VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR = 1000314004,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR = 1000314005,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR = 1000314006,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = 1000314007,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008,
+    VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009,
     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = 1000325000,
     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000,
     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001,
@@ -761,6 +771,8 @@ typedef enum VkImageLayout {
     VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000,
     VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003,
     VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000,
+    VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR = 1000314000,
+    VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR = 1000314001,
     VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
     VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
     VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
@@ -1510,6 +1522,7 @@ typedef enum VkAccessFlagBits {
     VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000,
     VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000,
     VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000,
+    VK_ACCESS_NONE_KHR = 0,
     VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
     VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
     VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV,
@@ -1699,6 +1712,7 @@ typedef enum VkPipelineStageFlagBits {
     VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000,
     VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000,
     VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000,
+    VK_PIPELINE_STAGE_NONE_KHR = 0,
     VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
     VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
     VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
@@ -1727,6 +1741,11 @@ typedef enum VkFenceCreateFlagBits {
 } VkFenceCreateFlagBits;
 typedef VkFlags VkFenceCreateFlags;
 typedef VkFlags VkSemaphoreCreateFlags;
+
+typedef enum VkEventCreateFlagBits {
+    VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR = 0x00000001,
+    VK_EVENT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkEventCreateFlagBits;
 typedef VkFlags VkEventCreateFlags;
 
 typedef enum VkQueryPipelineStatisticFlagBits {
@@ -7521,6 +7540,250 @@ typedef struct VkPipelineLibraryCreateInfoKHR {
 #define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info"
 
 
+#define VK_KHR_synchronization2 1
+typedef uint64_t VkFlags64;
+#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1
+#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2"
+typedef VkFlags64 VkPipelineStageFlags2KHR;
+
+// Flag bits for VkPipelineStageFlags2KHR
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_NONE_KHR = 0;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR = 0x00200000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV = 0x00200000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000;
+
+typedef VkFlags64 VkAccessFlags2KHR;
+
+// Flag bits for VkAccessFlags2KHR
+static const VkAccessFlags2KHR VK_ACCESS_2_NONE_KHR = 0;
+static const VkAccessFlags2KHR VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001;
+static const VkAccessFlags2KHR VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002;
+static const VkAccessFlags2KHR VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004;
+static const VkAccessFlags2KHR VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008;
+static const VkAccessFlags2KHR VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040;
+static const VkAccessFlags2KHR VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080;
+static const VkAccessFlags2KHR VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100;
+static const VkAccessFlags2KHR VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200;
+static const VkAccessFlags2KHR VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400;
+static const VkAccessFlags2KHR VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800;
+static const VkAccessFlags2KHR VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000;
+static const VkAccessFlags2KHR VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000;
+static const VkAccessFlags2KHR VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000;
+static const VkAccessFlags2KHR VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000;
+static const VkAccessFlags2KHR VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL;
+static const VkAccessFlags2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000;
+static const VkAccessFlags2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000;
+static const VkAccessFlags2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000;
+static const VkAccessFlags2KHR VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000;
+static const VkAccessFlags2KHR VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000;
+static const VkAccessFlags2KHR VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000;
+static const VkAccessFlags2KHR VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000;
+static const VkAccessFlags2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000;
+static const VkAccessFlags2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000;
+static const VkAccessFlags2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000;
+static const VkAccessFlags2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000;
+static const VkAccessFlags2KHR VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000;
+static const VkAccessFlags2KHR VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000;
+
+
+typedef enum VkSubmitFlagBitsKHR {
+    VK_SUBMIT_PROTECTED_BIT_KHR = 0x00000001,
+    VK_SUBMIT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSubmitFlagBitsKHR;
+typedef VkFlags VkSubmitFlagsKHR;
+typedef struct VkMemoryBarrier2KHR {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkPipelineStageFlags2KHR    srcStageMask;
+    VkAccessFlags2KHR           srcAccessMask;
+    VkPipelineStageFlags2KHR    dstStageMask;
+    VkAccessFlags2KHR           dstAccessMask;
+} VkMemoryBarrier2KHR;
+
+typedef struct VkBufferMemoryBarrier2KHR {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkPipelineStageFlags2KHR    srcStageMask;
+    VkAccessFlags2KHR           srcAccessMask;
+    VkPipelineStageFlags2KHR    dstStageMask;
+    VkAccessFlags2KHR           dstAccessMask;
+    uint32_t                    srcQueueFamilyIndex;
+    uint32_t                    dstQueueFamilyIndex;
+    VkBuffer                    buffer;
+    VkDeviceSize                offset;
+    VkDeviceSize                size;
+} VkBufferMemoryBarrier2KHR;
+
+typedef struct VkImageMemoryBarrier2KHR {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkPipelineStageFlags2KHR    srcStageMask;
+    VkAccessFlags2KHR           srcAccessMask;
+    VkPipelineStageFlags2KHR    dstStageMask;
+    VkAccessFlags2KHR           dstAccessMask;
+    VkImageLayout               oldLayout;
+    VkImageLayout               newLayout;
+    uint32_t                    srcQueueFamilyIndex;
+    uint32_t                    dstQueueFamilyIndex;
+    VkImage                     image;
+    VkImageSubresourceRange     subresourceRange;
+} VkImageMemoryBarrier2KHR;
+
+typedef struct VkDependencyInfoKHR {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkDependencyFlags                   dependencyFlags;
+    uint32_t                            memoryBarrierCount;
+    const VkMemoryBarrier2KHR*          pMemoryBarriers;
+    uint32_t                            bufferMemoryBarrierCount;
+    const VkBufferMemoryBarrier2KHR*    pBufferMemoryBarriers;
+    uint32_t                            imageMemoryBarrierCount;
+    const VkImageMemoryBarrier2KHR*     pImageMemoryBarriers;
+} VkDependencyInfoKHR;
+
+typedef struct VkSemaphoreSubmitInfoKHR {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkSemaphore                 semaphore;
+    uint64_t                    value;
+    VkPipelineStageFlags2KHR    stageMask;
+    uint32_t                    deviceIndex;
+} VkSemaphoreSubmitInfoKHR;
+
+typedef struct VkCommandBufferSubmitInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkCommandBuffer    commandBuffer;
+    uint32_t           deviceMask;
+} VkCommandBufferSubmitInfoKHR;
+
+typedef struct VkSubmitInfo2KHR {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkSubmitFlagsKHR                       flags;
+    uint32_t                               waitSemaphoreInfoCount;
+    const VkSemaphoreSubmitInfoKHR*        pWaitSemaphoreInfos;
+    uint32_t                               commandBufferInfoCount;
+    const VkCommandBufferSubmitInfoKHR*    pCommandBufferInfos;
+    uint32_t                               signalSemaphoreInfoCount;
+    const VkSemaphoreSubmitInfoKHR*        pSignalSemaphoreInfos;
+} VkSubmitInfo2KHR;
+
+typedef struct VkPhysicalDeviceSynchronization2FeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           synchronization2;
+} VkPhysicalDeviceSynchronization2FeaturesKHR;
+
+typedef struct VkQueueFamilyCheckpointProperties2NV {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkPipelineStageFlags2KHR    checkpointExecutionStageMask;
+} VkQueueFamilyCheckpointProperties2NV;
+
+typedef struct VkCheckpointData2NV {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkPipelineStageFlags2KHR    stage;
+    void*                       pCheckpointMarker;
+} VkCheckpointData2NV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfoKHR* pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfoKHR* pDependencyInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkQueryPool queryPool, uint32_t query);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker);
+typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    const VkDependencyInfoKHR*                  pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags2KHR                    stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    const VkDependencyInfoKHR*                  pDependencyInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkDependencyInfoKHR*                  pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags2KHR                    stage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo2KHR*                     pSubmits,
+    VkFence                                     fence);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags2KHR                    stage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+
+VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointData2NV*                        pCheckpointData);
+#endif
+
+
 #define VK_KHR_zero_initialize_workgroup_memory 1
 #define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1
 #define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory"
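
The synchronization2 interfaces added above fold the stage and access masks into each barrier, so a single VkDependencyInfoKHR carries everything vkCmdPipelineBarrier2KHR needs. A minimal sketch of an image layout transition recorded through the new entry point, assuming the VK_KHR_synchronization2 extension and feature are enabled and the function is reachable (via these prototypes, or in practice a pointer obtained from vkGetDeviceProcAddr); cmd and image are placeholder handles created elsewhere:

    #include <vulkan/vulkan.h>

    /* Transition 'image' from color-attachment writes to fragment-shader reads
     * using the synchronization2 barrier command declared above. */
    static void cmd_transition_for_sampling(VkCommandBuffer cmd, VkImage image)
    {
        VkImageMemoryBarrier2KHR imageBarrier = {
            .sType               = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR,
            .srcStageMask        = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR,
            .srcAccessMask       = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR,
            .dstStageMask        = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR,
            .dstAccessMask       = VK_ACCESS_2_SHADER_READ_BIT_KHR,
            .oldLayout           = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
            .newLayout           = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .image               = image,
            .subresourceRange    = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 },
        };
        VkDependencyInfoKHR depInfo = {
            .sType                   = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
            .imageMemoryBarrierCount = 1,
            .pImageMemoryBarriers    = &imageBarrier,
        };
        /* cmd must be in the recording state, outside a render pass instance. */
        vkCmdPipelineBarrier2KHR(cmd, &depInfo);
    }
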
index 18bc417..e18248d 100644 (file)
@@ -502,6 +502,12 @@ class OutputGenerator:
                 exit(1)
 
             body += self.genRequirements(name, mustBeFound = False)
+            # Some C compilers only allow initializing a 'static const' variable with a literal value.
+            # So initializing an alias from another 'static const' value would fail to compile.
+            # Work around this by chasing the aliases to get the actual value.
+            while numVal is None:
+                alias = self.registry.tree.find("enums/enum[@name='" + strVal + "']")
+                (numVal, strVal) = self.enumToValue(alias, True)
             body += "static const {} {} = {};\n".format(flagTypeName, name, strVal)
 
         # Postfix
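
The workaround matters because C, unlike C++, requires objects with static storage duration to be initialized with constant expressions, and a 'static const' variable does not qualify. A small illustration of the header output this change affects (the commented-out form is roughly what a naive emission of an alias would produce; the literal below is what the generator emits after chasing the alias chain):

    /* Rejected by C compilers that require a constant expression here:
     *     static const VkAccessFlags2KHR VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV =
     *         VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR;
     * Accepted everywhere, after resolving the alias down to its literal value: */
    static const VkAccessFlags2KHR VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000;
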
index e80c628..8496d3c 100644 (file)
@@ -1,9 +1,9 @@
 {
   "version info": {
     "schema version": 2,
-    "api version": "1.2.169",
-    "comment": "from git branch: github-main commit: 1eae55f4c780fb0f0aa990071fe158d2a70e7429",
-    "date": "2021-02-02 06:53:27Z"
+    "api version": "1.2.170",
+    "comment": "from git branch: github-main commit: 815e07c850d642d996292f5cdec25d41ecdff0d0",
+    "date": "2021-02-15 05:28:41Z"
   },
   "validation": {
     "vkGetInstanceProcAddr": {
         },
         {
           "vuid": "VUID-VkQueueFamilyProperties2-pNext-pNext",
-          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkQueueFamilyCheckpointPropertiesNV\">VkQueueFamilyCheckpointPropertiesNV</a>"
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkQueueFamilyCheckpointProperties2NV\">VkQueueFamilyCheckpointProperties2NV</a> or <a href=\"#VkQueueFamilyCheckpointPropertiesNV\">VkQueueFamilyCheckpointPropertiesNV</a>"
         },
         {
           "vuid": "VUID-VkQueueFamilyProperties2-sType-unique",
         }
       ]
     },
+    "VkQueueFamilyCheckpointProperties2NV": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_NV_device_diagnostic_checkpoints)+(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkQueueFamilyCheckpointProperties2NV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV</code>"
+        }
+      ]
+    },
     "VkQueueFamilyCheckpointPropertiesNV": {
       "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_NV_device_diagnostic_checkpoints)": [
         {
         },
         {
           "vuid": "VUID-VkDeviceCreateInfo-pNext-pNext",
-          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceDeviceMemoryReportCreateInfoEXT\">VkDeviceDeviceMemoryReportCreateInfoEXT</a>, <a href=\"#VkDeviceDiagnosticsConfigCreateInfoNV\">VkDeviceDiagnosticsConfigCreateInfoNV</a>, <a href=\"#VkDeviceGroupDeviceCreateInfo\">VkDeviceGroupDeviceCreateInfo</a>, <a href=\"#VkDeviceMemoryOverallocationCreateInfoAMD\">VkDeviceMemoryOverallocationCreateInfoAMD</a>, <a href=\"#VkDevicePrivateDataCreateInfoEXT\">VkDevicePrivateDataCreateInfoEXT</a>, <a href=\"#VkPhysicalDevice16BitStorageFeatures\">VkPhysicalDevice16BitStorageFeatures</a>, <a href=\"#VkPhysicalDevice4444FormatsFeaturesEXT\">VkPhysicalDevice4444FormatsFeaturesEXT</a>, <a href=\"#VkPhysicalDevice8BitStorageFeatures\">VkPhysicalDevice8BitStorageFeatures</a>, <a href=\"#VkPhysicalDeviceASTCDecodeFeaturesEXT\">VkPhysicalDeviceASTCDecodeFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceAccelerationStructureFeaturesKHR\">VkPhysicalDeviceAccelerationStructureFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT\">VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeatures\">VkPhysicalDeviceBufferDeviceAddressFeatures</a>, <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeaturesEXT\">VkPhysicalDeviceBufferDeviceAddressFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceCoherentMemoryFeaturesAMD\">VkPhysicalDeviceCoherentMemoryFeaturesAMD</a>, <a href=\"#VkPhysicalDeviceComputeShaderDerivativesFeaturesNV\">VkPhysicalDeviceComputeShaderDerivativesFeaturesNV</a>, <a href=\"#VkPhysicalDeviceConditionalRenderingFeaturesEXT\">VkPhysicalDeviceConditionalRenderingFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceCooperativeMatrixFeaturesNV\">VkPhysicalDeviceCooperativeMatrixFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCornerSampledImageFeaturesNV\">VkPhysicalDeviceCornerSampledImageFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCoverageReductionModeFeaturesNV\">VkPhysicalDeviceCoverageReductionModeFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCustomBorderColorFeaturesEXT\">VkPhysicalDeviceCustomBorderColorFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV\">VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV</a>, <a href=\"#VkPhysicalDeviceDepthClipEnableFeaturesEXT\">VkPhysicalDeviceDepthClipEnableFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDescriptorIndexingFeatures\">VkPhysicalDeviceDescriptorIndexingFeatures</a>, <a href=\"#VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV\">VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceDeviceMemoryReportFeaturesEXT\">VkPhysicalDeviceDeviceMemoryReportFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDiagnosticsConfigFeaturesNV\">VkPhysicalDeviceDiagnosticsConfigFeaturesNV</a>, <a href=\"#VkPhysicalDeviceExclusiveScissorFeaturesNV\">VkPhysicalDeviceExclusiveScissorFeaturesNV</a>, <a href=\"#VkPhysicalDeviceExtendedDynamicStateFeaturesEXT\">VkPhysicalDeviceExtendedDynamicStateFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFeatures2\">VkPhysicalDeviceFeatures2</a>, <a href=\"#VkPhysicalDeviceFragmentDensityMap2FeaturesEXT\">VkPhysicalDeviceFragmentDensityMap2FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentDensityMapFeaturesEXT\">VkPhysicalDeviceFragmentDensityMapFeaturesEXT</a>, <a 
href=\"#VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV\">VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV</a>, <a href=\"#VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT\">VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV\">VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceFragmentShadingRateFeaturesKHR\">VkPhysicalDeviceFragmentShadingRateFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceHostQueryResetFeatures\">VkPhysicalDeviceHostQueryResetFeatures</a>, <a href=\"#VkPhysicalDeviceImageRobustnessFeaturesEXT\">VkPhysicalDeviceImageRobustnessFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceImagelessFramebufferFeatures\">VkPhysicalDeviceImagelessFramebufferFeatures</a>, <a href=\"#VkPhysicalDeviceIndexTypeUint8FeaturesEXT\">VkPhysicalDeviceIndexTypeUint8FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceInlineUniformBlockFeaturesEXT\">VkPhysicalDeviceInlineUniformBlockFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceLineRasterizationFeaturesEXT\">VkPhysicalDeviceLineRasterizationFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceMemoryPriorityFeaturesEXT\">VkPhysicalDeviceMemoryPriorityFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceMeshShaderFeaturesNV\">VkPhysicalDeviceMeshShaderFeaturesNV</a>, <a href=\"#VkPhysicalDeviceMultiviewFeatures\">VkPhysicalDeviceMultiviewFeatures</a>, <a href=\"#VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE\">VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE</a>, <a href=\"#VkPhysicalDevicePerformanceQueryFeaturesKHR\">VkPhysicalDevicePerformanceQueryFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT\">VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT</a>, <a href=\"#VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR\">VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePortabilitySubsetFeaturesKHR\">VkPhysicalDevicePortabilitySubsetFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePrivateDataFeaturesEXT\">VkPhysicalDevicePrivateDataFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceProtectedMemoryFeatures\">VkPhysicalDeviceProtectedMemoryFeatures</a>, <a href=\"#VkPhysicalDeviceRayQueryFeaturesKHR\">VkPhysicalDeviceRayQueryFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceRayTracingPipelineFeaturesKHR\">VkPhysicalDeviceRayTracingPipelineFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV\">VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV</a>, <a href=\"#VkPhysicalDeviceRobustness2FeaturesEXT\">VkPhysicalDeviceRobustness2FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceSamplerYcbcrConversionFeatures\">VkPhysicalDeviceSamplerYcbcrConversionFeatures</a>, <a href=\"#VkPhysicalDeviceScalarBlockLayoutFeatures\">VkPhysicalDeviceScalarBlockLayoutFeatures</a>, <a href=\"#VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures\">VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures</a>, <a href=\"#VkPhysicalDeviceShaderAtomicFloatFeaturesEXT\">VkPhysicalDeviceShaderAtomicFloatFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderAtomicInt64Features\">VkPhysicalDeviceShaderAtomicInt64Features</a>, <a href=\"#VkPhysicalDeviceShaderClockFeaturesKHR\">VkPhysicalDeviceShaderClockFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT\">VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderDrawParametersFeatures\">VkPhysicalDeviceShaderDrawParametersFeatures</a>, <a 
href=\"#VkPhysicalDeviceShaderFloat16Int8Features\">VkPhysicalDeviceShaderFloat16Int8Features</a>, <a href=\"#VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT\">VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderImageFootprintFeaturesNV\">VkPhysicalDeviceShaderImageFootprintFeaturesNV</a>, <a href=\"#VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL\">VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL</a>, <a href=\"#VkPhysicalDeviceShaderSMBuiltinsFeaturesNV\">VkPhysicalDeviceShaderSMBuiltinsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures\">VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures</a>, <a href=\"#VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR\">VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShadingRateImageFeaturesNV\">VkPhysicalDeviceShadingRateImageFeaturesNV</a>, <a href=\"#VkPhysicalDeviceSubgroupSizeControlFeaturesEXT\">VkPhysicalDeviceSubgroupSizeControlFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT\">VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT\">VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTimelineSemaphoreFeatures\">VkPhysicalDeviceTimelineSemaphoreFeatures</a>, <a href=\"#VkPhysicalDeviceTransformFeedbackFeaturesEXT\">VkPhysicalDeviceTransformFeedbackFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceUniformBufferStandardLayoutFeatures\">VkPhysicalDeviceUniformBufferStandardLayoutFeatures</a>, <a href=\"#VkPhysicalDeviceVariablePointersFeatures\">VkPhysicalDeviceVariablePointersFeatures</a>, <a href=\"#VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT\">VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceVulkan11Features\">VkPhysicalDeviceVulkan11Features</a>, <a href=\"#VkPhysicalDeviceVulkan12Features\">VkPhysicalDeviceVulkan12Features</a>, <a href=\"#VkPhysicalDeviceVulkanMemoryModelFeatures\">VkPhysicalDeviceVulkanMemoryModelFeatures</a>, <a href=\"#VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR\">VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceYcbcrImageArraysFeaturesEXT\">VkPhysicalDeviceYcbcrImageArraysFeaturesEXT</a>, or <a href=\"#VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR\">VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR</a>"
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceDeviceMemoryReportCreateInfoEXT\">VkDeviceDeviceMemoryReportCreateInfoEXT</a>, <a href=\"#VkDeviceDiagnosticsConfigCreateInfoNV\">VkDeviceDiagnosticsConfigCreateInfoNV</a>, <a href=\"#VkDeviceGroupDeviceCreateInfo\">VkDeviceGroupDeviceCreateInfo</a>, <a href=\"#VkDeviceMemoryOverallocationCreateInfoAMD\">VkDeviceMemoryOverallocationCreateInfoAMD</a>, <a href=\"#VkDevicePrivateDataCreateInfoEXT\">VkDevicePrivateDataCreateInfoEXT</a>, <a href=\"#VkPhysicalDevice16BitStorageFeatures\">VkPhysicalDevice16BitStorageFeatures</a>, <a href=\"#VkPhysicalDevice4444FormatsFeaturesEXT\">VkPhysicalDevice4444FormatsFeaturesEXT</a>, <a href=\"#VkPhysicalDevice8BitStorageFeatures\">VkPhysicalDevice8BitStorageFeatures</a>, <a href=\"#VkPhysicalDeviceASTCDecodeFeaturesEXT\">VkPhysicalDeviceASTCDecodeFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceAccelerationStructureFeaturesKHR\">VkPhysicalDeviceAccelerationStructureFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT\">VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeatures\">VkPhysicalDeviceBufferDeviceAddressFeatures</a>, <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeaturesEXT\">VkPhysicalDeviceBufferDeviceAddressFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceCoherentMemoryFeaturesAMD\">VkPhysicalDeviceCoherentMemoryFeaturesAMD</a>, <a href=\"#VkPhysicalDeviceComputeShaderDerivativesFeaturesNV\">VkPhysicalDeviceComputeShaderDerivativesFeaturesNV</a>, <a href=\"#VkPhysicalDeviceConditionalRenderingFeaturesEXT\">VkPhysicalDeviceConditionalRenderingFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceCooperativeMatrixFeaturesNV\">VkPhysicalDeviceCooperativeMatrixFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCornerSampledImageFeaturesNV\">VkPhysicalDeviceCornerSampledImageFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCoverageReductionModeFeaturesNV\">VkPhysicalDeviceCoverageReductionModeFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCustomBorderColorFeaturesEXT\">VkPhysicalDeviceCustomBorderColorFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV\">VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV</a>, <a href=\"#VkPhysicalDeviceDepthClipEnableFeaturesEXT\">VkPhysicalDeviceDepthClipEnableFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDescriptorIndexingFeatures\">VkPhysicalDeviceDescriptorIndexingFeatures</a>, <a href=\"#VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV\">VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceDeviceMemoryReportFeaturesEXT\">VkPhysicalDeviceDeviceMemoryReportFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDiagnosticsConfigFeaturesNV\">VkPhysicalDeviceDiagnosticsConfigFeaturesNV</a>, <a href=\"#VkPhysicalDeviceExclusiveScissorFeaturesNV\">VkPhysicalDeviceExclusiveScissorFeaturesNV</a>, <a href=\"#VkPhysicalDeviceExtendedDynamicStateFeaturesEXT\">VkPhysicalDeviceExtendedDynamicStateFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFeatures2\">VkPhysicalDeviceFeatures2</a>, <a href=\"#VkPhysicalDeviceFragmentDensityMap2FeaturesEXT\">VkPhysicalDeviceFragmentDensityMap2FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentDensityMapFeaturesEXT\">VkPhysicalDeviceFragmentDensityMapFeaturesEXT</a>, <a 
href=\"#VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV\">VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV</a>, <a href=\"#VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT\">VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV\">VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceFragmentShadingRateFeaturesKHR\">VkPhysicalDeviceFragmentShadingRateFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceHostQueryResetFeatures\">VkPhysicalDeviceHostQueryResetFeatures</a>, <a href=\"#VkPhysicalDeviceImageRobustnessFeaturesEXT\">VkPhysicalDeviceImageRobustnessFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceImagelessFramebufferFeatures\">VkPhysicalDeviceImagelessFramebufferFeatures</a>, <a href=\"#VkPhysicalDeviceIndexTypeUint8FeaturesEXT\">VkPhysicalDeviceIndexTypeUint8FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceInlineUniformBlockFeaturesEXT\">VkPhysicalDeviceInlineUniformBlockFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceLineRasterizationFeaturesEXT\">VkPhysicalDeviceLineRasterizationFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceMemoryPriorityFeaturesEXT\">VkPhysicalDeviceMemoryPriorityFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceMeshShaderFeaturesNV\">VkPhysicalDeviceMeshShaderFeaturesNV</a>, <a href=\"#VkPhysicalDeviceMultiviewFeatures\">VkPhysicalDeviceMultiviewFeatures</a>, <a href=\"#VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE\">VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE</a>, <a href=\"#VkPhysicalDevicePerformanceQueryFeaturesKHR\">VkPhysicalDevicePerformanceQueryFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT\">VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT</a>, <a href=\"#VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR\">VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePortabilitySubsetFeaturesKHR\">VkPhysicalDevicePortabilitySubsetFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePrivateDataFeaturesEXT\">VkPhysicalDevicePrivateDataFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceProtectedMemoryFeatures\">VkPhysicalDeviceProtectedMemoryFeatures</a>, <a href=\"#VkPhysicalDeviceRayQueryFeaturesKHR\">VkPhysicalDeviceRayQueryFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceRayTracingPipelineFeaturesKHR\">VkPhysicalDeviceRayTracingPipelineFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV\">VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV</a>, <a href=\"#VkPhysicalDeviceRobustness2FeaturesEXT\">VkPhysicalDeviceRobustness2FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceSamplerYcbcrConversionFeatures\">VkPhysicalDeviceSamplerYcbcrConversionFeatures</a>, <a href=\"#VkPhysicalDeviceScalarBlockLayoutFeatures\">VkPhysicalDeviceScalarBlockLayoutFeatures</a>, <a href=\"#VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures\">VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures</a>, <a href=\"#VkPhysicalDeviceShaderAtomicFloatFeaturesEXT\">VkPhysicalDeviceShaderAtomicFloatFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderAtomicInt64Features\">VkPhysicalDeviceShaderAtomicInt64Features</a>, <a href=\"#VkPhysicalDeviceShaderClockFeaturesKHR\">VkPhysicalDeviceShaderClockFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT\">VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderDrawParametersFeatures\">VkPhysicalDeviceShaderDrawParametersFeatures</a>, <a 
href=\"#VkPhysicalDeviceShaderFloat16Int8Features\">VkPhysicalDeviceShaderFloat16Int8Features</a>, <a href=\"#VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT\">VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderImageFootprintFeaturesNV\">VkPhysicalDeviceShaderImageFootprintFeaturesNV</a>, <a href=\"#VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL\">VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL</a>, <a href=\"#VkPhysicalDeviceShaderSMBuiltinsFeaturesNV\">VkPhysicalDeviceShaderSMBuiltinsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures\">VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures</a>, <a href=\"#VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR\">VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShadingRateImageFeaturesNV\">VkPhysicalDeviceShadingRateImageFeaturesNV</a>, <a href=\"#VkPhysicalDeviceSubgroupSizeControlFeaturesEXT\">VkPhysicalDeviceSubgroupSizeControlFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceSynchronization2FeaturesKHR\">VkPhysicalDeviceSynchronization2FeaturesKHR</a>, <a href=\"#VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT\">VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT\">VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTimelineSemaphoreFeatures\">VkPhysicalDeviceTimelineSemaphoreFeatures</a>, <a href=\"#VkPhysicalDeviceTransformFeedbackFeaturesEXT\">VkPhysicalDeviceTransformFeedbackFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceUniformBufferStandardLayoutFeatures\">VkPhysicalDeviceUniformBufferStandardLayoutFeatures</a>, <a href=\"#VkPhysicalDeviceVariablePointersFeatures\">VkPhysicalDeviceVariablePointersFeatures</a>, <a href=\"#VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT\">VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceVulkan11Features\">VkPhysicalDeviceVulkan11Features</a>, <a href=\"#VkPhysicalDeviceVulkan12Features\">VkPhysicalDeviceVulkan12Features</a>, <a href=\"#VkPhysicalDeviceVulkanMemoryModelFeatures\">VkPhysicalDeviceVulkanMemoryModelFeatures</a>, <a href=\"#VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR\">VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceYcbcrImageArraysFeaturesEXT\">VkPhysicalDeviceYcbcrImageArraysFeaturesEXT</a>, or <a href=\"#VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR\">VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR</a>"
         },
         {
           "vuid": "VUID-VkDeviceCreateInfo-sType-unique",
         }
       ]
     },
+    "vkQueueSubmit2KHR": {
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-synchronization2-03866",
+          "text": " The <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03867",
+          "text": " If a command recorded into the <code>commandBuffer</code> member of any element of the <code>pCommandBufferInfos</code> member of any element of <code>pSubmits</code> referenced an <a href=\"#VkEvent\">VkEvent</a>, that event <strong class=\"purple\">must</strong> not be referenced by a command that has been submitted to another queue and is still in the <em>pending state</em>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-semaphore-03868",
+          "text": " The <code>semaphore</code> member of any element of the <code>pSignalSemaphoreInfos</code> member of any element of <code>pSubmits</code> <strong class=\"purple\">must</strong> be unsignaled when the semaphore signal operation it defines is executed on the device"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-stageMask-03869",
+          "text": " The <code>stageMask</code> member of any element of the <code>pSignalSemaphoreInfos</code> member of any element of <code>pSubmits</code> <strong class=\"purple\">must</strong> only include pipeline stages that are supported by the queue family which <code>queue</code> belongs to"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-stageMask-03870",
+          "text": " The <code>stageMask</code> member of any element of the <code>pWaitSemaphoreInfos</code> member of any element of <code>pSubmits</code> <strong class=\"purple\">must</strong> only include pipeline stages that are supported by the queue family which <code>queue</code> belongs to"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-semaphore-03871",
+          "text": " When a semaphore wait operation for a binary semaphore is executed, as defined by the <code>semaphore</code> member of any element of the <code>pWaitSemaphoreInfos</code> member of any element of <code>pSubmits</code>, there <strong class=\"purple\">must</strong> be no other queues waiting on the same semaphore"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-semaphore-03872",
+          "text": " The <code>semaphore</code> member of any element of the <code>pWaitSemaphoreInfos</code> member of any element of <code>pSubmits</code> <strong class=\"purple\">must</strong> be semaphores that are signaled, or have <a href=\"#synchronization-semaphores-signaling\">semaphore signal operations</a> previously submitted for execution"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03874",
+          "text": " The <code>commandBuffer</code> member of any element of the <code>pCommandBufferInfos</code> member of any element of <code>pSubmits</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">pending or executable state</a>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03875",
+          "text": " If a command recorded into the <code>commandBuffer</code> member of any element of the <code>pCommandBufferInfos</code> member of any element of <code>pSubmits</code> was not recorded with the <code>VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT</code>, it <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">pending state</a>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03876",
+          "text": " Any <a href=\"#commandbuffers-secondary\">secondary command buffers recorded</a> into the <code>commandBuffer</code> member of any element of the <code>pCommandBufferInfos</code> member of any element of <code>pSubmits</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">pending or executable state</a>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03877",
+          "text": " If any <a href=\"#commandbuffers-secondary\">secondary command buffers recorded</a> into the <code>commandBuffer</code> member of any element of the <code>pCommandBufferInfos</code> member of any element of <code>pSubmits</code> was not recorded with the <code>VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT</code>, it <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">pending state</a>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03878",
+          "text": " The <code>commandBuffer</code> member of any element of the <code>pCommandBufferInfos</code> member of any element of <code>pSubmits</code> <strong class=\"purple\">must</strong> have been allocated from a <code>VkCommandPool</code> that was created for the same queue family <code>queue</code> belongs to"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03879",
+          "text": " If a command recorded into the <code>commandBuffer</code> member of any element of the <code>pCommandBufferInfos</code> member of any element of <code>pSubmits</code> includes a <a href=\"#synchronization-queue-transfers-acquire\">Queue Family Transfer Acquire Operation</a>, there <strong class=\"purple\">must</strong> exist a previously submitted <a href=\"#synchronization-queue-transfers-release\">Queue Family Transfer Release Operation</a> on a queue in the queue family identified by the acquire operation, with parameters matching the acquire operation as defined in the definition of such <a href=\"#synchronization-queue-transfers-acquire\">acquire operations</a>, and which happens before the acquire operation"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-pSubmits-parameter",
+          "text": " If <code>submitCount</code> is not <code>0</code>, <code>pSubmits</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>submitCount</code> valid <a href=\"#VkSubmitInfo2KHR\">VkSubmitInfo2KHR</a> structures"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-fence-parameter",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-commonparent",
+          "text": " Both of <code>fence</code>, and <code>queue</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_KHR_synchronization2)+(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-semaphore-03873",
+          "text": " Any <code>semaphore</code> member of any element of the <code>pWaitSemaphoreInfos</code> member of any element of <code>pSubmits</code> that was created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_BINARY_KHR</code> <strong class=\"purple\">must</strong> reference a semaphore signal operation that has been submitted for execution and any semaphore signal operations on which it depends (if any) <strong class=\"purple\">must</strong> have also been submitted for execution"
+        }
+      ],
+      "(VK_KHR_synchronization2)+(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03880",
+          "text": " If a command recorded into the <code>commandBuffer</code> member of any element of the <code>pCommandBufferInfos</code> member of any element of <code>pSubmits</code> was a <a href=\"#vkCmdBeginQuery\">vkCmdBeginQuery</a> whose <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, the <a href=\"#profiling-lock\">profiling lock</a> <strong class=\"purple\">must</strong> have been held continuously on the <code>VkDevice</code> that <code>queue</code> was retrieved from, throughout recording of those command buffers"
+        }
+      ]
+    },
+    "VkSubmitInfo2KHR": {
+      "(VK_KHR_synchronization2)+(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-semaphore-03881",
+          "text": " If the same semaphore is used as the <code>semaphore</code> member of both an element of <code>pSignalSemaphoreInfos</code> and <code>pWaitSemaphoreInfos</code>, and that semaphore is a timeline semaphore, the <code>value</code> member of the <code>pSignalSemaphoreInfos</code> element <strong class=\"purple\">must</strong> be greater than the <code>value</code> member of the <code>pWaitSemaphoreInfos</code> element"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-semaphore-03882",
+          "text": " If the <code>semaphore</code> member of any element of <code>pSignalSemaphoreInfos</code> is a timeline semaphore, the <code>value</code> member of that element <strong class=\"purple\">must</strong> have a value greater than the current value of the semaphore when the <a href=\"#synchronization-semaphores-signaling\">semaphore signal operation</a> is executed"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-semaphore-03883",
+          "text": " If the <code>semaphore</code> member of any element of <code>pSignalSemaphoreInfos</code> is a timeline semaphore, the <code>value</code> member of that element <strong class=\"purple\">must</strong> have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than <a href=\"#limits-maxTimelineSemaphoreValueDifference\"><code>maxTimelineSemaphoreValueDifference</code></a>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-semaphore-03884",
+          "text": " If the <code>semaphore</code> member of any element of <code>pWaitSemaphoreInfos</code> is a timeline semaphore, the <code>value</code> member of that element <strong class=\"purple\">must</strong> have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than <a href=\"#limits-maxTimelineSemaphoreValueDifference\"><code>maxTimelineSemaphoreValueDifference</code></a>"
+        }
+      ],
+      "(VK_KHR_synchronization2)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-flags-03885",
+          "text": " If the protected memory feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_SUBMIT_PROTECTED_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-flags-03886",
+          "text": " If <code>flags</code> includes <code>VK_SUBMIT_PROTECTED_BIT_KHR</code>, all elements of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> be protected command buffers"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-flags-03887",
+          "text": " If <code>flags</code> does not include <code>VK_SUBMIT_PROTECTED_BIT_KHR</code>, each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPerformanceQuerySubmitInfoKHR\">VkPerformanceQuerySubmitInfoKHR</a>, <a href=\"#VkWin32KeyedMutexAcquireReleaseInfoKHR\">VkWin32KeyedMutexAcquireReleaseInfoKHR</a>, or <a href=\"#VkWin32KeyedMutexAcquireReleaseInfoNV\">VkWin32KeyedMutexAcquireReleaseInfoNV</a>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-sType-unique",
+          "text": " The <code>sType</code> value of each struct in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSubmitFlagBitsKHR\">VkSubmitFlagBitsKHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-pWaitSemaphoreInfos-parameter",
+          "text": " If <code>waitSemaphoreInfoCount</code> is not <code>0</code>, <code>pWaitSemaphoreInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>waitSemaphoreInfoCount</code> valid <a href=\"#VkSemaphoreSubmitInfoKHR\">VkSemaphoreSubmitInfoKHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-pCommandBufferInfos-parameter",
+          "text": " If <code>commandBufferInfoCount</code> is not <code>0</code>, <code>pCommandBufferInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>commandBufferInfoCount</code> valid <a href=\"#VkCommandBufferSubmitInfoKHR\">VkCommandBufferSubmitInfoKHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo2KHR-pSignalSemaphoreInfos-parameter",
+          "text": " If <code>signalSemaphoreInfoCount</code> is not <code>0</code>, <code>pSignalSemaphoreInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>signalSemaphoreInfoCount</code> valid <a href=\"#VkSemaphoreSubmitInfoKHR\">VkSemaphoreSubmitInfoKHR</a> structures"
+        }
+      ]
+    },
+    "VkSemaphoreSubmitInfoKHR": {
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)": [
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-{stageMaskName}-03929",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-{stageMaskName}-03930",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-device-03888",
+          "text": " If the <code>device</code> that <code>semaphore</code> was created on is not a device group, <code>deviceIndex</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-{stageMaskName}-03931",
+          "text": " If the <a href=\"#features-conditionalRendering\">conditional rendering</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-{stageMaskName}-03932",
+          "text": " If the <a href=\"#features-fragmentDensityMap\">fragment density map</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-{stageMaskName}-03933",
+          "text": " If the <a href=\"#features-transformFeedback\">transform feedback</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-{stageMaskName}-03934",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-{stageMaskName}-03935",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-{stageMaskName}-03936",
+          "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_KHR_device_group_creation,VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-device-03889",
+          "text": " If the <code>device</code> that <code>semaphore</code> was created on is a device group, <code>deviceIndex</code> <strong class=\"purple\">must</strong> be a valid device index"
+        }
+      ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-semaphore-parameter",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSubmitInfoKHR-stageMask-parameter",
+          "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits2KHR\">VkPipelineStageFlagBits2KHR</a> values"
+        }
+      ]
+    },
+    "VkCommandBufferSubmitInfoKHR": {
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkCommandBufferSubmitInfoKHR-commandBuffer-03890",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not have been allocated with <code>VK_COMMAND_BUFFER_LEVEL_SECONDARY</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferSubmitInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferSubmitInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferSubmitInfoKHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        }
+      ],
+      "(VK_KHR_synchronization2)+(VK_KHR_device_group_creation,VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkCommandBufferSubmitInfoKHR-deviceMask-03891",
+          "text": " If <code>deviceMask</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> be a valid device mask"
+        }
+      ]
+    },
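
Taken together, the vkQueueSubmit2KHR, VkSubmitInfo2KHR, VkSemaphoreSubmitInfoKHR and VkCommandBufferSubmitInfoKHR entries above describe an ordinary submission. A hedged sketch of how an application might fill the structures in, assuming the synchronization2 feature is enabled and that the queue, command buffer, semaphores and fence passed in were all created from the same VkDevice:

    /* Submit one primary command buffer, waiting on and signalling one binary semaphore each. */
    static VkResult submit_one(VkQueue queue, VkCommandBuffer cmd,
                               VkSemaphore waitSemaphore, VkSemaphore signalSemaphore, VkFence fence)
    {
        VkSemaphoreSubmitInfoKHR waitInfo = {
            .sType       = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
            .semaphore   = waitSemaphore,
            .stageMask   = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR,
            .deviceIndex = 0,                      /* 0 unless the device is a device group */
        };
        VkCommandBufferSubmitInfoKHR cmdInfo = {
            .sType         = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR,
            .commandBuffer = cmd,                  /* a primary command buffer in the executable state */
        };
        VkSemaphoreSubmitInfoKHR signalInfo = {
            .sType     = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
            .semaphore = signalSemaphore,
            .stageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR,
        };
        VkSubmitInfo2KHR submit = {
            .sType                    = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR,
            .waitSemaphoreInfoCount   = 1,
            .pWaitSemaphoreInfos      = &waitInfo,
            .commandBufferInfoCount   = 1,
            .pCommandBufferInfos      = &cmdInfo,
            .signalSemaphoreInfoCount = 1,
            .pSignalSemaphoreInfos    = &signalInfo,
        };
        return vkQueueSubmit2KHR(queue, 1, &submit, fence);
    }
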
     "vkQueueSubmit": {
       "core": [
         {
           "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
         },
         {
-          "vuid": "VUID-VkEventCreateInfo-flags-zerobitmask",
-          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+          "vuid": "VUID-VkEventCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkEventCreateFlagBits\">VkEventCreateFlagBits</a> values"
         }
       ]
     },
       ]
     },
     "vkGetEventStatus": {
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkGetEventStatus-event-03940",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> not have been created with <code>VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR</code>"
+        }
+      ],
       "core": [
         {
           "vuid": "VUID-vkGetEventStatus-device-parameter",
       ]
     },
     "vkSetEvent": {
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkSetEvent-event-03941",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> not have been created with <code>VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR</code>"
+        }
+      ],
       "core": [
         {
           "vuid": "VUID-vkSetEvent-device-parameter",
     "vkResetEvent": {
       "core": [
         {
-          "vuid": "VUID-vkResetEvent-event-01148",
-          "text": " <code>event</code> <strong class=\"purple\">must</strong> not be waited on by a <code>vkCmdWaitEvents</code> command that is currently executing"
+          "vuid": "VUID-vkResetEvent-event-03821",
+          "text": " There <strong class=\"purple\">must</strong> be an execution dependency between <code>vkCmdResetEvent</code> and the execution of any <a href=\"#vkCmdWaitEvents\">vkCmdWaitEvents</a> that includes <code>event</code> in its <code>pEvents</code> parameter"
         },
         {
           "vuid": "VUID-vkResetEvent-device-parameter",
           "vuid": "VUID-vkResetEvent-event-parent",
           "text": " <code>event</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
         }
+      ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkResetEvent-event-03822",
+          "text": " There <strong class=\"purple\">must</strong> be an execution dependency between <code>vkCmdResetEvent</code> and the execution of any <a href=\"#vkCmdWaitEvents2KHR\">vkCmdWaitEvents2KHR</a> that includes <code>event</code> in its <code>pEvents</code> parameter"
+        },
+        {
+          "vuid": "VUID-vkResetEvent-event-03823",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> not have been created with <code>VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR</code>"
+        }
+      ]
+    },
+    "vkCmdSetEvent2KHR": {
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-synchronization2-03824",
+          "text": " The <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-dependencyFlags-03825",
+          "text": " The <code>dependencyFlags</code> member of <code>dependencyInfo</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-srcStageMask-03827",
+          "text": " The <code>srcStageMask</code> member of any element of the <code>pMemoryBarriers</code>, <code>pBufferMemoryBarriers</code>, or <code>pImageMemoryBarriers</code> members of <code>pDependencyInfo</code> <strong class=\"purple\">must</strong> only include pipeline stages valid for the queue family that was used to create the command pool that <code>commandBuffer</code> was allocated from"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-dstStageMask-03828",
+          "text": " The <code>dstStageMask</code> member of any element of the <code>pMemoryBarriers</code>, <code>pBufferMemoryBarriers</code>, or <code>pImageMemoryBarriers</code> members of <code>pDependencyInfo</code> <strong class=\"purple\">must</strong> only include pipeline stages valid for the queue family that was used to create the command pool that <code>commandBuffer</code> was allocated from"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-event-parameter",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkEvent\">VkEvent</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-pDependencyInfo-parameter",
+          "text": " <code>pDependencyInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDependencyInfoKHR\">VkDependencyInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>event</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_KHR_synchronization2)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkCmdSetEvent2KHR-commandBuffer-03826",
+          "text": " The current device mask of <code>commandBuffer</code> <strong class=\"purple\">must</strong> include exactly one physical device."
+        }
+      ]
+    },
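
The vkCmdSetEvent2KHR entries above pair with vkCmdWaitEvents2KHR: the dependency is described when the event is set, and matching information is supplied again per event at wait time. A hedged sketch, assuming the synchronization2 feature is enabled and that cmd and event are placeholder handles recorded with outside a render pass instance:

    /* Make compute-shader writes visible to a later compute access via an event. */
    static void cmd_signal_and_wait(VkCommandBuffer cmd, VkEvent event)
    {
        VkMemoryBarrier2KHR barrier = {
            .sType         = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR,
            .srcStageMask  = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR,
            .srcAccessMask = VK_ACCESS_2_SHADER_WRITE_BIT_KHR,
            .dstStageMask  = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR,
            .dstAccessMask = VK_ACCESS_2_SHADER_READ_BIT_KHR,
        };
        VkDependencyInfoKHR depInfo = {
            .sType              = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
            .dependencyFlags    = 0,               /* required to be 0 for vkCmdSetEvent2KHR */
            .memoryBarrierCount = 1,
            .pMemoryBarriers    = &barrier,
        };
        vkCmdSetEvent2KHR(cmd, event, &depInfo);   /* outside a render pass instance */
        /* ... unrelated work ... */
        /* The dependency info supplied at wait time must match the one used to set the event. */
        vkCmdWaitEvents2KHR(cmd, 1, &event, &depInfo);
    }
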
+    "VkDependencyInfoKHR": {
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkDependencyInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDependencyInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDependencyInfoKHR-dependencyFlags-parameter",
+          "text": " <code>dependencyFlags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDependencyFlagBits\">VkDependencyFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkDependencyInfoKHR-pMemoryBarriers-parameter",
+          "text": " If <code>memoryBarrierCount</code> is not <code>0</code>, <code>pMemoryBarriers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>memoryBarrierCount</code> valid <a href=\"#VkMemoryBarrier2KHR\">VkMemoryBarrier2KHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkDependencyInfoKHR-pBufferMemoryBarriers-parameter",
+          "text": " If <code>bufferMemoryBarrierCount</code> is not <code>0</code>, <code>pBufferMemoryBarriers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bufferMemoryBarrierCount</code> valid <a href=\"#VkBufferMemoryBarrier2KHR\">VkBufferMemoryBarrier2KHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkDependencyInfoKHR-pImageMemoryBarriers-parameter",
+          "text": " If <code>imageMemoryBarrierCount</code> is not <code>0</code>, <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>imageMemoryBarrierCount</code> valid <a href=\"#VkImageMemoryBarrier2KHR\">VkImageMemoryBarrier2KHR</a> structures"
+        }
       ]
     },
     "vkCmdSetEvent": {
           "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:stageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV</code>"
         }
       ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkCmdSetEvent-stageMask-03937",
+          "text": " If the <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature is not enabled, pname:stageMask <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ],
       "(VK_VERSION_1_1,VK_KHR_device_group)": [
         {
           "vuid": "VUID-vkCmdSetEvent-commandBuffer-01152",
         }
       ]
     },
+    "vkCmdResetEvent2KHR": {
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-{stageMaskName}-03929",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-{stageMaskName}-03930",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-synchronization2-03829",
+          "text": " The <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-03830",
+          "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-event-03831",
+          "text": " There <strong class=\"purple\">must</strong> be an execution dependency between <code>vkCmdResetEvent2KHR</code> and the execution of any <a href=\"#vkCmdWaitEvents\">vkCmdWaitEvents</a> that includes <code>event</code> in its <code>pEvents</code> parameter"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-event-03832",
+          "text": " There <strong class=\"purple\">must</strong> be an execution dependency between <code>vkCmdResetEvent2KHR</code> and the execution of any <a href=\"#vkCmdWaitEvents2KHR\">vkCmdWaitEvents2KHR</a> that includes <code>event</code> in its <code>pEvents</code> parameter"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-{stageMaskName}-03931",
+          "text": " If the <a href=\"#features-conditionalRendering\">conditional rendering</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-{stageMaskName}-03932",
+          "text": " If the <a href=\"#features-fragmentDensityMap\">fragment density map</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-{stageMaskName}-03933",
+          "text": " If the <a href=\"#features-transformFeedback\">transform feedback</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-{stageMaskName}-03934",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-{stageMaskName}-03935",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-{stageMaskName}-03936",
+          "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: stageMa)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-commandBuffer-03833",
+          "text": " <code>commandBuffer</code>&#8217;s current device mask <strong class=\"purple\">must</strong> include exactly one physical device."
+        }
+      ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-event-parameter",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkEvent\">VkEvent</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-parameter",
+          "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits2KHR\">VkPipelineStageFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-requiredbitmask",
+          "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent2KHR-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>event</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
     "vkCmdResetEvent": {
       "core": [
         {
           "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_STAGE_HOST_BIT</code>"
         },
         {
-          "vuid": "VUID-vkCmdResetEvent-event-01156",
-          "text": " When this command executes, <code>event</code> <strong class=\"purple\">must</strong> not be waited on by a <code>vkCmdWaitEvents</code> command that is currently executing"
+          "vuid": "VUID-vkCmdResetEvent-event-03834",
+          "text": " There <strong class=\"purple\">must</strong> be an execution dependency between <code>vkCmdResetEvent</code> and the execution of any <a href=\"#vkCmdWaitEvents\">vkCmdWaitEvents</a> that includes <code>event</code> in its <code>pEvents</code> parameter"
         },
         {
           "vuid": "VUID-vkCmdResetEvent-commandBuffer-parameter",
           "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:stageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV</code>"
         }
       ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent-stageMask-03937",
+          "text": " If the <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature is not enabled, pname:stageMask <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-event-03835",
+          "text": " There <strong class=\"purple\">must</strong> be an execution dependency between <code>vkCmdResetEvent</code> and the execution of any <a href=\"#vkCmdWaitEvents2KHR\">vkCmdWaitEvents2KHR</a> that includes <code>event</code> in its <code>pEvents</code> parameter"
+        }
+      ],
       "(VK_VERSION_1_1,VK_KHR_device_group)": [
         {
           "vuid": "VUID-vkCmdResetEvent-commandBuffer-01157",
         }
       ]
     },
+    "vkCmdWaitEvents2KHR": {
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-synchronization2-03836",
+          "text": " The <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03837",
+          "text": " Members of <code>pEvents</code> <strong class=\"purple\">must</strong> not have been signaled by <a href=\"#vkCmdSetEvent\">vkCmdSetEvent</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03838",
+          "text": " For any element <span class=\"eq\">i</span> of <code>pEvents</code>, if that event is signaled by <a href=\"#vkCmdSetEvent2KHR\">vkCmdSetEvent2KHR</a>, that command&#8217;s <code>dependencyInfo</code> parameter <strong class=\"purple\">must</strong> be exactly equal to the <span class=\"eq\">i</span>th element of <code>pDependencyInfos</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03839",
+          "text": " For any element <span class=\"eq\">i</span> of <code>pEvents</code>, if that event is signaled by <a href=\"#vkSetEvent\">vkSetEvent</a>, barriers in the <span class=\"eq\">i</span>th element of <code>pDependencyInfos</code> <strong class=\"purple\">must</strong> include only host operations in their first <a href=\"#synchronization-dependencies-scopes\">synchronization scope</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03840",
+          "text": " For any element <span class=\"eq\">i</span> of <code>pEvents</code>, if barriers in the <span class=\"eq\">i</span>th element of <code>pDependencyInfos</code> include only host operations, the <span class=\"eq\">i</span>th element of <code>pEvents</code> <strong class=\"purple\">must</strong> be signaled before <a href=\"#vkCmdWaitEvents2KHR\">vkCmdWaitEvents2KHR</a> is executed"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03841",
+          "text": " For any element <span class=\"eq\">i</span> of <code>pEvents</code>, if barriers in the <span class=\"eq\">i</span>th element of <code>pDependencyInfos</code> do not include host operations, the <span class=\"eq\">i</span>th element of <code>pEvents</code> <strong class=\"purple\">must</strong> be by a corresponding <a href=\"#vkCmdSetEvent2KHR\">vkCmdSetEvent2KHR</a> that occurred earlier in <a href=\"#synchronization-submission-order\">submission order</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-srcStageMask-03842",
+          "text": " The <code>srcStageMask</code> member of any element of the <code>pMemoryBarriers</code>, <code>pBufferMemoryBarriers</code>, or <code>pImageMemoryBarriers</code> members of <code>pDependencyInfo</code> <strong class=\"purple\">must</strong> either include only pipeline stages valid for the queue family that was used to create the command pool that <code>commandBuffer</code> was allocated from, or include only <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-dstStageMask-03843",
+          "text": " The <code>dstStageMask</code> member of any element of the <code>pMemoryBarriers</code>, <code>pBufferMemoryBarriers</code>, or <code>pImageMemoryBarriers</code> members of <code>pDependencyInfo</code> <strong class=\"purple\">must</strong> only include pipeline stages valid for the queue family that was used to create the command pool that <code>commandBuffer</code> was allocated from"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-dependencyFlags-03844",
+          "text": " The <code>dependencyFlags</code> member of any element of <code>dependencyInfo</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03845",
+          "text": " If <code>pEvents</code> includes one or more events that will be signaled by <a href=\"#vkSetEvent\">vkSetEvent</a> after <code>commandBuffer</code> has been submitted to a queue, then <code>vkCmdWaitEvents2KHR</code> <strong class=\"purple\">must</strong> not be called inside a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-commandBuffer-03846",
+          "text": " <code>commandBuffer</code>&#8217;s current device mask <strong class=\"purple\">must</strong> include exactly one physical device"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-parameter",
+          "text": " <code>pEvents</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>eventCount</code> valid <a href=\"#VkEvent\">VkEvent</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-pDependencyInfos-parameter",
+          "text": " <code>pDependencyInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>eventCount</code> valid <a href=\"#VkDependencyInfoKHR\">VkDependencyInfoKHR</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-eventCount-arraylength",
+          "text": " <code>eventCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents2KHR-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and the elements of <code>pEvents</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
     "vkCmdWaitEvents": {
       "core": [
         {
           "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
         },
         {
-          "vuid": "VUID-vkCmdWaitEvents-srcStageMask-requiredbitmask",
-          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
-        },
-        {
           "vuid": "VUID-vkCmdWaitEvents-dstStageMask-parameter",
           "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
         },
         {
-          "vuid": "VUID-vkCmdWaitEvents-dstStageMask-requiredbitmask",
-          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
-        },
-        {
           "vuid": "VUID-vkCmdWaitEvents-pMemoryBarriers-parameter",
           "text": " If <code>memoryBarrierCount</code> is not <code>0</code>, <code>pMemoryBarriers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>memoryBarrierCount</code> valid <a href=\"#VkMemoryBarrier\">VkMemoryBarrier</a> structures"
         },
           "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV</code>"
         }
       ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcStageMask-03937",
+          "text": " If the <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-dstStageMask-03937",
+          "text": " If the <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-pEvents-03847",
+          "text": " Members of <code>pEvents</code> <strong class=\"purple\">must</strong> not have been signaled by <a href=\"#vkCmdSetEvent2KHR\">vkCmdSetEvent2KHR</a>"
+        }
+      ],
       "(VK_VERSION_1_1,VK_KHR_device_group)": [
         {
           "vuid": "VUID-vkCmdWaitEvents-commandBuffer-01167",
         }
       ]
     },
+    "vkCmdPipelineBarrier2KHR": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-pDependencies-02285",
+          "text": " If fname:vkCmdPipelineBarrier2KHR is called within a render pass instance, the render pass <strong class=\"purple\">must</strong> have been created with at least one <a href=\"#VkSubpassDependency\">VkSubpassDependency</a> instance in <code>VkRenderPassCreateInfo</code>::<code>pDependencies</code> that expresses a dependency from the current subpass to itself, with <a href=\"#synchronization-dependencies-scopes\">synchronization scopes</a> and <a href=\"#synchronization-dependencies-access-scopes\">access scopes</a> that are all supersets of the scopes defined in this command"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-bufferMemoryBarrierCount-01178",
+          "text": " If fname:vkCmdPipelineBarrier2KHR is called within a render pass instance, it <strong class=\"purple\">must</strong> not include any buffer memory barriers"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-image-04073",
+          "text": " If fname:vkCmdPipelineBarrier2KHR is called within a render pass instance, the <code>image</code> member of any image memory barrier included in this command <strong class=\"purple\">must</strong> be an attachment used in the current subpass both as an input attachment, and as either a color or depth/stencil attachment"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-oldLayout-01181",
+          "text": " If fname:vkCmdPipelineBarrier2KHR is called within a render pass instance, the <code>oldLayout</code> and <code>newLayout</code> members of any image memory barrier included in this command <strong class=\"purple\">must</strong> be equal"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-srcQueueFamilyIndex-01182",
+          "text": " If fname:vkCmdPipelineBarrier2KHR is called within a render pass instance, the <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> members of any image memory barrier included in this command <strong class=\"purple\">must</strong> be equal"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-synchronization2-03848",
+          "text": " The <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-srcStageMask-03849",
+          "text": " The <code>srcStageMask</code> member of any element of the <code>pMemoryBarriers</code>, <code>pBufferMemoryBarriers</code>, or <code>pImageMemoryBarriers</code> members of <code>pDependencyInfo</code> <strong class=\"purple\">must</strong> only include pipeline stages valid for the queue family that was used to create the command pool that <code>commandBuffer</code> was allocated from"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-dstStageMask-03850",
+          "text": " The <code>dstStageMask</code> member of any element of the <code>pMemoryBarriers</code>, <code>pBufferMemoryBarriers</code>, or <code>pImageMemoryBarriers</code> members of <code>pDependencyInfo</code> <strong class=\"purple\">must</strong> only include pipeline stages valid for the queue family that was used to create the command pool that <code>commandBuffer</code> was allocated from"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-dependencyFlags-01186",
+          "text": " If fname:vkCmdPipelineBarrier2KHR is called outside of a render pass instance, <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code> <strong class=\"purple\">must</strong> not be included in the dependency flags"
+        }
+      ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-pDependencyInfo-parameter",
+          "text": " <code>pDependencyInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDependencyInfoKHR\">VkDependencyInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier2KHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        }
+      ]
+    },
     "vkCmdPipelineBarrier": {
       "core": [
         {
           "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV</code>"
         }
       ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-03937",
+          "text": " If the <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-03937",
+          "text": " If the <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ],
       "(VK_VERSION_1_1,VK_KHR_multiview)": [
         {
           "vuid": "VUID-vkCmdPipelineBarrier-dependencyFlags-01186",
         }
       ]
     },
+    "VkMemoryBarrier2KHR": {
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-parameter",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits2KHR\">VkPipelineStageFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-parameter",
+          "text": " <code>srcAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits2KHR\">VkAccessFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-parameter",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits2KHR\">VkPipelineStageFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-parameter",
+          "text": " <code>dstAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits2KHR\">VkAccessFlagBits2KHR</a> values"
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03929",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03930",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03900",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03901",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_INDEX_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03902",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03903",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03904",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_UNIFORM_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03905",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03906",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03907",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03908",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03909",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03910",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03911",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03912",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03913",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03914",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFER_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03915",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03916",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_HOST_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03917",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_HOST_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03929",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03930",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03900",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03901",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_INDEX_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03902",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03903",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03904",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_UNIFORM_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03905",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03906",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03907",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03908",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03909",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03910",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03911",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03912",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03913",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03914",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFER_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03915",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03916",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_HOST_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03917",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_HOST_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03931",
+          "text": " If the <a href=\"#features-conditionalRendering\">conditional rendering</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03918",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03931",
+          "text": " If the <a href=\"#features-conditionalRendering\">conditional rendering</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03918",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03932",
+          "text": " If the <a href=\"#features-fragmentDensityMap\">fragment density map</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03919",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03932",
+          "text": " If the <a href=\"#features-fragmentDensityMap\">fragment density map</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03919",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03933",
+          "text": " If the <a href=\"#features-transformFeedback\">transform feedback</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03920",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03921",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03922",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03933",
+          "text": " If the <a href=\"#features-transformFeedback\">transform feedback</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03920",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03921",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03922",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03934",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03935",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03934",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03935",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV</code>"
+        }
+      ],
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03936",
+          "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03923",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03936",
+          "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03923",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_NV_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03924",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03925",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03924",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03925",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_blend_operation_advanced)": [
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03926",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03926",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_KHR_acceleration_structure,VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03927",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03928",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03927",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03928",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ]
+    },
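The VkMemoryBarrier2KHR rules above all follow one pattern: every bit set in pname:srcAccessMask or pname:dstAccessMask must be paired with a stage in the corresponding stage mask that can actually perform that access. A minimal sketch of a global memory barrier that respects this pairing, assuming the synchronization2 feature is enabled and `cmd` is a hypothetical VkCommandBuffer in the recording state:

    // Sketch only: pairs a transfer write with the copy stage and a sampled read
    // with the fragment shader stage, as the access/stage rules above require.
    VkMemoryBarrier2KHR barrier{};
    barrier.sType         = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR;
    barrier.srcStageMask  = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
    barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
    barrier.dstStageMask  = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
    barrier.dstAccessMask = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR;

    VkDependencyInfoKHR dependencyInfo{};
    dependencyInfo.sType              = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR;
    dependencyInfo.memoryBarrierCount = 1;
    dependencyInfo.pMemoryBarriers    = &barrier;

    vkCmdPipelineBarrier2KHR( cmd, &dependencyInfo );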
     "VkMemoryBarrier": {
       "core": [
         {
         }
       ]
     },
+    "VkBufferMemoryBarrier2KHR": {
+      "core": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03929",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03930",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03900",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03901",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_INDEX_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03902",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03903",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03904",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_UNIFORM_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03905",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03906",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03907",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03908",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03909",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03910",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03911",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03912",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03913",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03914",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFER_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03915",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03916",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_HOST_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03917",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_HOST_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03929",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03930",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03900",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03901",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_INDEX_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03902",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03903",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03904",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_UNIFORM_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03905",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03906",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03907",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03908",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03909",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03910",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03911",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03912",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03913",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03914",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFER_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03915",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03916",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_HOST_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03917",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_HOST_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-offset-01187",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-size-01188",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>size</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-size-01189",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>size</code> <strong class=\"purple\">must</strong> be less than or equal to than the size of <code>buffer</code> minus <code>offset</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-buffer-01931",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03851",
+          "text": " If <code>srcStageMask</code> or <code>dstStageMask</code> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be equal"
+        }
+      ],
+      "(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03931",
+          "text": " If the <a href=\"#features-conditionalRendering\">conditional rendering</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03918",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03931",
+          "text": " If the <a href=\"#features-conditionalRendering\">conditional rendering</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03918",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03932",
+          "text": " If the <a href=\"#features-fragmentDensityMap\">fragment density map</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03919",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03932",
+          "text": " If the <a href=\"#features-fragmentDensityMap\">fragment density map</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03919",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03933",
+          "text": " If the <a href=\"#features-transformFeedback\">transform feedback</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03920",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03921",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03922",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03933",
+          "text": " If the <a href=\"#features-transformFeedback\">transform feedback</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03920",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03921",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03922",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03934",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03935",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03934",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03935",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV</code>"
+        }
+      ],
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03936",
+          "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03923",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03936",
+          "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03923",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_NV_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03924",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03925",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03924",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03925",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_blend_operation_advanced)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03926",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03926",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_KHR_acceleration_structure,VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03927",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03928",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03927",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03928",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-buffer-04086",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not equal, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be valid queue families"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcQueueFamilyIndex-04087",
+          "text": " If <code>srcQueueFamilyIndex</code> is not equal to <code>dstQueueFamilyIndex</code>, at least one <strong class=\"purple\">must</strong> not be a special queue family reserved for external memory ownership transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-buffer-04088",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not equal, and one of <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> is a special queue family values reserved for external memory transfers, the other <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-buffer-04089",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not equal, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>"
+        }
+      ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-parameter",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits2KHR\">VkPipelineStageFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-parameter",
+          "text": " <code>srcAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits2KHR\">VkAccessFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-parameter",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits2KHR\">VkPipelineStageFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-parameter",
+          "text": " <code>dstAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits2KHR\">VkAccessFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier2KHR-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        }
+      ]
+    },
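Combining the stage/access pairing rules with the pname:offset and pname:size requirements above, a conforming VkBufferMemoryBarrier2KHR might look like the following sketch; synchronization2 is assumed to be enabled, and `cmd` and `vertexBuffer` are hypothetical handles:

    // Sketch only: makes a transfer write to vertexBuffer visible to vertex
    // attribute reads; covering the whole buffer means offset/size trivially
    // satisfy VUID-VkBufferMemoryBarrier2KHR-offset-01187 and -size-01188/-01189.
    VkBufferMemoryBarrier2KHR bufferBarrier{};
    bufferBarrier.sType               = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR;
    bufferBarrier.srcStageMask        = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
    bufferBarrier.srcAccessMask       = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
    bufferBarrier.dstStageMask        = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR;
    bufferBarrier.dstAccessMask       = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR;
    bufferBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;  // no ownership transfer
    bufferBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    bufferBarrier.buffer              = vertexBuffer;
    bufferBarrier.offset              = 0;
    bufferBarrier.size                = VK_WHOLE_SIZE;

    VkDependencyInfoKHR dependencyInfo{};
    dependencyInfo.sType                    = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR;
    dependencyInfo.bufferMemoryBarrierCount = 1;
    dependencyInfo.pBufferMemoryBarriers    = &bufferBarrier;

    vkCmdPipelineBarrier2KHR( cmd, &dependencyInfo );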
     "VkBufferMemoryBarrier": {
       "core": [
         {
           "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not equal, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be valid queue families"
         },
         {
-          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01190",
-          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> both be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+          "vuid": "VUID-VkBufferMemoryBarrier-synchronization2-03852",
+          "text": " If the <a href=\"#features-synchronization2\"><code>synchronization2</code> feature</a> is not enabled, and <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> both be <code>VK_QUEUE_FAMILY_IGNORED</code>"
         }
       ],
       "(VK_VERSION_1_1,VK_KHR_external_memory)": [
           "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not equal, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>"
         },
         {
-          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01191",
-          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, at least one of <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+          "vuid": "VUID-VkBufferMemoryBarrier-synchronization2-03853",
+          "text": " If the <a href=\"#features-synchronization2\"><code>synchronization2</code> feature</a> is not enabled, and <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, at least one of <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+        }
+      ]
+    },
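The rewritten VUID-VkBufferMemoryBarrier-synchronization2-03852 and -03853 entries only relax the old requirement when the synchronization2 feature is enabled. Without it, a barrier on a buffer created with VK_SHARING_MODE_CONCURRENT still has to leave both queue family indices ignored, as in this sketch (`cmd` and `sharedBuffer` are hypothetical handles):

    // Sketch only: legacy VkBufferMemoryBarrier with no queue family ownership
    // transfer, the only legal form for a concurrently shared buffer when the
    // synchronization2 feature is not enabled.
    VkBufferMemoryBarrier legacyBarrier{};
    legacyBarrier.sType               = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
    legacyBarrier.srcAccessMask       = VK_ACCESS_TRANSFER_WRITE_BIT;
    legacyBarrier.dstAccessMask       = VK_ACCESS_SHADER_READ_BIT;
    legacyBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    legacyBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    legacyBarrier.buffer              = sharedBuffer;
    legacyBarrier.offset              = 0;
    legacyBarrier.size                = VK_WHOLE_SIZE;

    vkCmdPipelineBarrier( cmd, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                          0, 0, nullptr, 1, &legacyBarrier, 0, nullptr );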
+    "VkImageMemoryBarrier2KHR": {
+      "core": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03929",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03930",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03900",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03901",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_INDEX_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03902",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03903",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03904",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_UNIFORM_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03905",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03906",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03907",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03908",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03909",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADER_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03910",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03911",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03912",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03913",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03914",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFER_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03915",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03916",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_HOST_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03917",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_HOST_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03929",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03930",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03900",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03901",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_INDEX_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03902",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03903",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03904",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_UNIFORM_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03905",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03906",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03907",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03908",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03909",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADER_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>, or one of the <code>VK_PIPELINE_STAGE_*_SHADER_BIT</code> stages"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03910",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03911",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03912",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03913",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03914",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFER_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03915",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COPY_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_BLIT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03916",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_HOST_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03917",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_HOST_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-subresourceRange-01486",
+          "text": " <code>subresourceRange.baseMipLevel</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-subresourceRange-01724",
+          "text": " If <code>subresourceRange.levelCount</code> is not <code>VK_REMAINING_MIP_LEVELS</code>, <span class=\"eq\"><code>subresourceRange.baseMipLevel</code> &#43; <code>subresourceRange.levelCount</code></span> <strong class=\"purple\">must</strong> be less than or equal to the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-subresourceRange-01488",
+          "text": " <code>subresourceRange.baseArrayLayer</code> <strong class=\"purple\">must</strong> be less than the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-subresourceRange-01725",
+          "text": " If <code>subresourceRange.layerCount</code> is not <code>VK_REMAINING_ARRAY_LAYERS</code>, <span class=\"eq\"><code>subresourceRange.baseArrayLayer</code> &#43; <code>subresourceRange.layerCount</code></span> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-01932",
+          "text": " If <code>image</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01208",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01209",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01210",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01211",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_SAMPLED_BIT</code> or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01212",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_SRC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01213",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01197",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, <code>oldLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or the current layout of the image subresources affected by the barrier"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-newLayout-01198",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, <code>newLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03854",
+          "text": " If <code>srcStageMask</code> or <code>dstStageMask</code> include <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be equal"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03855",
+          "text": " If <code>srcStageMask</code> includes <code>VK_PIPELINE_STAGE_2_HOST_BIT_KHR</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, <code>oldLayout</code> <strong class=\"purple\">must</strong> be one of <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>, <code>VK_IMAGE_LAYOUT_UNDEFINED</code>, or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        }
+      ],
+      "(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03931",
+          "text": " If the <a href=\"#features-conditionalRendering\">conditional rendering</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03918",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03931",
+          "text": " If the <a href=\"#features-conditionalRendering\">conditional rendering</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03918",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03932",
+          "text": " If the <a href=\"#features-fragmentDensityMap\">fragment density map</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03919",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03932",
+          "text": " If the <a href=\"#features-fragmentDensityMap\">fragment density map</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03919",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03933",
+          "text": " If the <a href=\"#features-transformFeedback\">transform feedback</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03920",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03921",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03922",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03933",
+          "text": " If the <a href=\"#features-transformFeedback\">transform feedback</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03920",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03921",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03922",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03934",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03935",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03934",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03935",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV</code>"
+        }
+      ],
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03936",
+          "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:srcStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03923",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03936",
+          "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:dstStageMask <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03923",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>, <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_NV_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03924",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03925",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03924",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03925",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_blend_operation_advanced)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03926",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03926",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR</code> <code>VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_KHR_acceleration_structure,VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03927",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03928",
+          "text": " If pname:srcAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR</code>, pname:srcStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03927",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code>, <code>VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR</code>, or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03928",
+          "text": " If pname:dstAccessMask includes <code>VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR</code>, pname:dstStageMask <strong class=\"purple\">must</strong> include <code>VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01658",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01659",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        }
+      ],
+      "(VK_VERSION_1_2,VK_EXT_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-04065",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with at least one of <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-04066",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-04067",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with at least one of <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-04068",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+        }
+      ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-03938",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR</code>, <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code> or <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-03939",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR</code>, <code>image</code> <strong class=\"purple\">must</strong> have been created with at least one of <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkSampleLocationsInfoEXT\">VkSampleLocationsInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-sType-unique",
+          "text": " The <code>sType</code> value of each struct in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-parameter",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits2KHR\">VkPipelineStageFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-parameter",
+          "text": " <code>srcAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits2KHR\">VkAccessFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-parameter",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits2KHR\">VkPipelineStageFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-parameter",
+          "text": " <code>dstAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits2KHR\">VkAccessFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-parameter",
+          "text": " <code>oldLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-newLayout-parameter",
+          "text": " <code>newLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-subresourceRange-parameter",
+          "text": " <code>subresourceRange</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a> structure"
+        }
+      ],
+      "(VK_KHR_fragment_shading_rate,VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-02088",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR</code> set"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-02902",
+          "text": " If <code>image</code> has a color format, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_COLOR_BIT</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-01671",
+          "text": " If <code>image</code> has a single-plane color format or is not <em>disjoint</em>, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_COLOR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-01672",
+          "text": " If <code>image</code> has a multi-planar format and the image is <em>disjoint</em>, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> include either at least one of <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>, and <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>; or <strong class=\"purple\">must</strong> include <code>VK_IMAGE_ASPECT_COLOR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-01673",
+          "text": " If <code>image</code> has a multi-planar format with only two planes, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>"
+        }
+      ],
+      "!(VK_VERSION_1_2,VK_KHR_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-01207",
+          "text": " If <code>image</code> has a depth/stencil format with both depth and stencil components, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> include both <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> and <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>"
+        }
+      ],
+      "(VK_VERSION_1_2,VK_KHR_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-03319",
+          "text": " If <code>image</code> has a depth/stencil format with both depth and stencil and the <a href=\"#features-separateDepthStencilLayouts\">separateDepthStencilLayouts</a> feature is enabled, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> include either or both <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> and <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-03320",
+          "text": " If <code>image</code> has a depth/stencil format with both depth and stencil and the <a href=\"#features-separateDepthStencilLayouts\">separateDepthStencilLayouts</a> feature is not enabled, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> include both <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> and <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-04069",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not equal, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be valid queue families"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-04070",
+          "text": " If <code>srcQueueFamilyIndex</code> is not equal to <code>dstQueueFamilyIndex</code>, at least one <strong class=\"purple\">must</strong> not be a special queue family reserved for external memory ownership transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-04071",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not equal, and one of <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> is a special queue family values reserved for external memory transfers, the other <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier2KHR-image-04072",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not equal, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>"
         }
       ]
     },
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01208",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01209",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01210",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01211",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_SAMPLED_BIT</code> or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_SAMPLED_BIT</code> or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01212",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_SRC_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_SRC_BIT</code>"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01213",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code>"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01197",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, <code>oldLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or the current layout of the image subresources affected by the barrier"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, <code>oldLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or the current layout of the image subresources affected by the barrier"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-newLayout-01198",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, <code>newLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, <code>newLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-sType-sType",
       "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
         {
           "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01658",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01659",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
         }
       ],
       "(VK_VERSION_1_2,VK_EXT_separate_depth_stencil_layouts)": [
         {
           "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-04065",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with at least one of <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with at least one of <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-04066",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-04067",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with at least one of <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with at least one of <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
         },
         {
           "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-04068",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+        }
+      ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-03938",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR</code>, <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code> or <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-03939",
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR</code>, <code>image</code> <strong class=\"purple\">must</strong> have been created with at least one of <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
         }
       ],
       "(VK_KHR_fragment_shading_rate,VK_NV_shading_rate_image)": [
         {
           "vuid": "VUID-VkImageMemoryBarrier-oldLayout-02088",
-          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define a <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR</code> set"
+          "text": " If <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> define a <a href=\"#synchronization-queue-transfers\">queue family ownership transfer</a> or <code>oldLayout</code> and <code>newLayout</code> define an <a href=\"#synchronization-image-layout-transitions\">image layout transition</a>, and <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR</code> set"
         }
       ],
       "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
           "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not equal, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be valid queue families"
         },
         {
-          "vuid": "VUID-VkImageMemoryBarrier-image-01199",
-          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> both be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+          "vuid": "VUID-VkImageMemoryBarrier-synchronization2-03856",
+          "text": " If the <a href=\"#features-synchronization2\"><code>synchronization2</code> feature</a> is not enabled, and <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> both be <code>VK_QUEUE_FAMILY_IGNORED</code>"
         }
       ],
       "(VK_VERSION_1_1,VK_KHR_external_memory)": [
           "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not equal, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>"
         },
         {
-          "vuid": "VUID-VkImageMemoryBarrier-image-01381",
-          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, at least one of <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+          "vuid": "VUID-VkImageMemoryBarrier-synchronization2-03857",
+          "text": " If the <a href=\"#features-synchronization2\"><code>synchronization2</code> feature</a> is not enabled, and <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, at least one of <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code>"
         }
       ]
     },
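The synchronization2 VUIDs above relax the concurrent-sharing rule (the VK_QUEUE_FAMILY_IGNORED requirement now applies only when the feature is disabled) and add usage requirements for the new VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR and VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR layouts. A minimal sketch of a classic VkImageMemoryBarrier that stays within those rules, assuming the image was created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT and the command buffer is in the recording state; the function name is illustrative, not part of this change:

    #include <vulkan/vulkan.h>

    /* Transition a color image into the new unified attachment layout. */
    static void transition_to_attachment_optimal(VkCommandBuffer cmd, VkImage image)
    {
        VkImageMemoryBarrier barrier = {0};
        barrier.sType               = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        barrier.srcAccessMask       = 0;
        barrier.dstAccessMask       = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
        barrier.oldLayout           = VK_IMAGE_LAYOUT_UNDEFINED;
        barrier.newLayout           = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR; /* requires the usage bits cited above */
        barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;                /* no ownership transfer */
        barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        barrier.image               = image;
        barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS;
        barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS;

        vkCmdPipelineBarrier(cmd,
                             VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                             VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                             0, 0, NULL, 0, NULL, 1, &barrier);
    }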
           "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
         },
         {
-          "vuid": "VUID-VkSubpassDependency-srcStageMask-requiredbitmask",
-          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
-        },
-        {
           "vuid": "VUID-VkSubpassDependency-dstStageMask-parameter",
           "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
         },
         {
-          "vuid": "VUID-VkSubpassDependency-dstStageMask-requiredbitmask",
-          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
-        },
-        {
           "vuid": "VUID-VkSubpassDependency-srcAccessMask-parameter",
           "text": " <code>srcAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits\">VkAccessFlagBits</a> values"
         },
         },
         {
           "vuid": "VUID-VkSubpassDependency2-pNext-pNext",
-          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkMemoryBarrier2KHR\">VkMemoryBarrier2KHR</a>"
         },
         {
-          "vuid": "VUID-VkSubpassDependency2-srcStageMask-parameter",
-          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+          "vuid": "VUID-VkSubpassDependency2-sType-unique",
+          "text": " The <code>sType</code> value of each struct in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
         },
         {
-          "vuid": "VUID-VkSubpassDependency2-srcStageMask-requiredbitmask",
-          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+          "vuid": "VUID-VkSubpassDependency2-srcStageMask-parameter",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
         },
         {
           "vuid": "VUID-VkSubpassDependency2-dstStageMask-parameter",
           "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
         },
         {
-          "vuid": "VUID-VkSubpassDependency2-dstStageMask-requiredbitmask",
-          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
-        },
-        {
           "vuid": "VUID-VkSubpassDependency2-srcAccessMask-parameter",
           "text": " <code>srcAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits\">VkAccessFlagBits</a> values"
         },
         }
       ]
     },
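The relaxed pNext VUID above means a VkMemoryBarrier2KHR can now be chained into VkSubpassDependency2; when it is, the stage and access masks come from the chained barrier, which is consistent with srcStageMask/dstStageMask becoming optional in the registry (see the vk.xml hunk later in this commit). A hedged fragment from render pass creation, with illustrative values:

    /* Chain a synchronization2 barrier into a subpass dependency. */
    VkMemoryBarrier2KHR barrier2 = {0};
    barrier2.sType         = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR;
    barrier2.srcStageMask  = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR;
    barrier2.srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR;
    barrier2.dstStageMask  = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
    barrier2.dstAccessMask = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR;

    VkSubpassDependency2 dep = {0};
    dep.sType      = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2;
    dep.pNext      = &barrier2;
    dep.srcSubpass = 0;
    dep.dstSubpass = 1;
    /* srcStageMask/dstStageMask may be left zero; the chained barrier supplies them. */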
+    "vkCmdWriteTimestamp2KHR": {
+      "(VK_KHR_synchronization2[]\n:stageMaskName: sta)": [
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-{stageMaskName}-03929",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-{stageMaskName}-03930",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-synchronization2-03858",
+          "text": " The <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03859",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> only include a single pipeline stage"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03860",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> only include stages valid for the queue family that was used to create the command pool that <code>commandBuffer</code> was allocated from"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-queryPool-03861",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> have been created with a <code>queryType</code> of <code>VK_QUERY_TYPE_TIMESTAMP</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-queryPool-03862",
+          "text": " The query identified by <code>queryPool</code> and <code>query</code> <strong class=\"purple\">must</strong> be <em>unavailable</em>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-timestampValidBits-03863",
+          "text": " The command pool&#8217;s queue family <strong class=\"purple\">must</strong> support a non-zero <code>timestampValidBits</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-{stageMaskName}-03931",
+          "text": " If the <a href=\"#features-conditionalRendering\">conditional rendering</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-{stageMaskName}-03932",
+          "text": " If the <a href=\"#features-fragmentDensityMap\">fragment density map</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-{stageMaskName}-03933",
+          "text": " If the <a href=\"#features-transformFeedback\">transform feedback</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-{stageMaskName}-03934",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-{stageMaskName}-03935",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-{stageMaskName}-03936",
+          "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-None-03864",
+          "text": " All queries used by the command <strong class=\"purple\">must</strong> be unavailable"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-query-03865",
+          "text": " If <code>vkCmdWriteTimestamp2KHR</code> is called within a render pass instance, the sum of <code>query</code> and the number of bits set in the current subpass&#8217;s view mask <strong class=\"purple\">must</strong> be less than or equal to the number of queries in <code>queryPool</code>"
+        }
+      ],
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-parameter",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits2KHR\">VkPipelineStageFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-requiredbitmask",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp2KHR-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>queryPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
     "vkCmdWriteTimestamp": {
       "core": [
         {
         }
       ]
     },
+    "vkCmdWriteBufferMarker2AMD": {
+      "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2[]\n:stageMaskName: sta)": [
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-{stageMaskName}-03929",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-{stageMaskName}-03930",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR</code> or <code>VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-synchronization2-03893",
+          "text": " The <a href=\"#features-synchronization2\"><code>synchronization2</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03894",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> include only a single pipeline stage"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03895",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> include only stages that are valid for the queue family that was used to create the command pool that <code>commandBuffer</code> was allocated from"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-dstOffset-03896",
+          "text": " <code>dstOffset</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>dstBuffer</code> minus <code>4</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-dstBuffer-03897",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-dstBuffer-03898",
+          "text": " If <code>dstBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-dstOffset-03899",
+          "text": " <code>dstOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        }
+      ],
+      "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-{stageMaskName}-03931",
+          "text": " If the <a href=\"#features-conditionalRendering\">conditional rendering</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT</code>"
+        }
+      ],
+      "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-{stageMaskName}-03932",
+          "text": " If the <a href=\"#features-fragmentDensityMap\">fragment density map</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT</code>"
+        }
+      ],
+      "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-{stageMaskName}-03933",
+          "text": " If the <a href=\"#features-transformFeedback\">transform feedback</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT</code>"
+        }
+      ],
+      "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-{stageMaskName}-03934",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-{stageMaskName}-03935",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV</code>"
+        }
+      ],
+      "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2[]\n:stageMaskName: sta)+(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-{stageMaskName}-03936",
+          "text": " If the <a href=\"#features-shadingRateImage\">shading rate image</a> feature is not enabled, pname:{stageMaskName} <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV</code>"
+        }
+      ],
+      "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-parameter",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits2KHR\">VkPipelineStageFlagBits2KHR</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-requiredbitmask",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-dstBuffer-parameter",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarker2AMD-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
     "vkCmdWriteBufferMarkerAMD": {
       "core": [
         {
           "text": " The <code>stride</code> member of <code>pCallableShaderBindingTable</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceRayTracingPipelinePropertiesKHR</code>::<code>maxShaderGroupStride</code>"
         },
         {
-          "vuid": "VUID-vkCmdTraceRaysKHR-flags-03695",
-          "text": " If the currently bound ray tracing pipeline was created with <code>flags</code> that included <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR</code>, the <code>deviceAddress</code> member of <code>pHitShaderBindingTable</code> <strong class=\"purple\">must</strong> not be zero"
-        },
-        {
           "vuid": "VUID-vkCmdTraceRaysKHR-flags-03696",
           "text": " If the currently bound ray tracing pipeline was created with <code>flags</code> that included <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR</code>, the <code>deviceAddress</code> member of <code>pHitShaderBindingTable</code> <strong class=\"purple\">must</strong> not be zero"
         },
           "text": " If the currently bound ray tracing pipeline was created with <code>flags</code> that included <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR</code>, entries in <code>pHitShaderBindingTable</code> accessed as a result of this command in order to execute an intersection shader <strong class=\"purple\">must</strong> not be set to zero"
         },
         {
-          "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-03720",
-          "text": " Any hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR</code>"
+          "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-04735",
+          "text": " Any non-zero hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR</code>"
         },
         {
-          "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-03721",
-          "text": " Any hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_AABBS_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR</code>"
+          "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-04736",
+          "text": " Any non-zero hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_AABBS_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR</code>"
         },
         {
           "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-04625",
           "text": " The <code>stride</code> member of <code>pCallableShaderBindingTable</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceRayTracingPipelinePropertiesKHR</code>::<code>maxShaderGroupStride</code>"
         },
         {
-          "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03695",
-          "text": " If the currently bound ray tracing pipeline was created with <code>flags</code> that included <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR</code>, the <code>deviceAddress</code> member of <code>pHitShaderBindingTable</code> <strong class=\"purple\">must</strong> not be zero"
-        },
-        {
           "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03696",
           "text": " If the currently bound ray tracing pipeline was created with <code>flags</code> that included <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR</code>, the <code>deviceAddress</code> member of <code>pHitShaderBindingTable</code> <strong class=\"purple\">must</strong> not be zero"
         },
           "text": " If the currently bound ray tracing pipeline was created with <code>flags</code> that included <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR</code>, entries in <code>pHitShaderBindingTable</code> accessed as a result of this command in order to execute an intersection shader <strong class=\"purple\">must</strong> not be set to zero"
         },
         {
-          "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-03720",
-          "text": " Any hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR</code>"
+          "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-04735",
+          "text": " Any non-zero hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR</code>"
         },
         {
-          "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-03721",
-          "text": " Any hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_AABBS_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR</code>"
+          "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-04736",
+          "text": " Any non-zero hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_AABBS_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR</code>"
         },
         {
           "vuid": "VUID-vkCmdTraceRaysIndirectKHR-indirectDeviceAddress-03632",
         }
       ]
     },
+    "VkPhysicalDeviceSynchronization2FeaturesKHR": {
+      "(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSynchronization2FeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR</code>"
+        }
+      ]
+    },
     "VkPhysicalDeviceFragmentShadingRateFeaturesKHR": {
       "(VK_KHR_fragment_shading_rate)": [
         {
         }
       ]
     },
+    "vkGetQueueCheckpointData2NV": {
+      "(VK_NV_device_diagnostic_checkpoints)+(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-vkGetQueueCheckpointData2NV-queue-03892",
+          "text": " The device that <code>queue</code> belongs to <strong class=\"purple\">must</strong> be in the lost state"
+        },
+        {
+          "vuid": "VUID-vkGetQueueCheckpointData2NV-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetQueueCheckpointData2NV-pCheckpointDataCount-parameter",
+          "text": " <code>pCheckpointDataCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetQueueCheckpointData2NV-pCheckpointData-parameter",
+          "text": " If the value referenced by <code>pCheckpointDataCount</code> is not <code>0</code>, and <code>pCheckpointData</code> is not <code>NULL</code>, <code>pCheckpointData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pCheckpointDataCount</code> <a href=\"#VkCheckpointData2NV\">VkCheckpointData2NV</a> structures"
+        }
+      ]
+    },
+    "VkCheckpointData2NV": {
+      "(VK_NV_device_diagnostic_checkpoints)+(VK_KHR_synchronization2)": [
+        {
+          "vuid": "VUID-VkCheckpointData2NV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkCheckpointData2NV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
     "vkGetQueueCheckpointDataNV": {
       "(VK_NV_device_diagnostic_checkpoints)": [
         {
index 21f02a1..571727b 100644 (file)
@@ -139,7 +139,7 @@ branch of the member gitlab server.
         <type category="define">// Vulkan 1.2 version number
 #define <name>VK_API_VERSION_1_2</name> <type>VK_MAKE_VERSION</type>(1, 2, 0)// Patch version should always be set to 0</type>
         <type category="define">// Version of this file
-#define <name>VK_HEADER_VERSION</name> 169</type>
+#define <name>VK_HEADER_VERSION</name> 170</type>
         <type category="define" requires="VK_HEADER_VERSION">// Complete version of this file
 #define <name>VK_HEADER_VERSION_COMPLETE</name> <type>VK_MAKE_VERSION</type>(1, 2, VK_HEADER_VERSION)</type>
 
@@ -170,6 +170,7 @@ typedef void <name>CAMetalLayer</name>;
         <type category="basetype">typedef <type>uint32_t</type> <name>VkSampleMask</name>;</type>
         <type category="basetype">typedef <type>uint32_t</type> <name>VkBool32</name>;</type>
         <type category="basetype">typedef <type>uint32_t</type> <name>VkFlags</name>;</type>
+        <type category="basetype">typedef <type>uint64_t</type> <name>VkFlags64</name>;</type>
         <type category="basetype">typedef <type>uint64_t</type> <name>VkDeviceSize</name>;</type>
         <type category="basetype">typedef <type>uint64_t</type> <name>VkDeviceAddress</name>;</type>
 
@@ -228,7 +229,7 @@ typedef void <name>CAMetalLayer</name>;
         <type requires="VkQueryControlFlagBits"           category="bitmask">typedef <type>VkFlags</type> <name>VkQueryControlFlags</name>;</type>
         <type requires="VkQueryResultFlagBits"            category="bitmask">typedef <type>VkFlags</type> <name>VkQueryResultFlags</name>;</type>
         <type requires="VkShaderModuleCreateFlagBits"     category="bitmask">typedef <type>VkFlags</type> <name>VkShaderModuleCreateFlags</name>;</type>
-        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkEventCreateFlags</name>;</type>
+        <type requires="VkEventCreateFlagBits"            category="bitmask">typedef <type>VkFlags</type> <name>VkEventCreateFlags</name>;</type>
         <type requires="VkCommandPoolCreateFlagBits"      category="bitmask">typedef <type>VkFlags</type> <name>VkCommandPoolCreateFlags</name>;</type>
         <type requires="VkCommandPoolResetFlagBits"       category="bitmask">typedef <type>VkFlags</type> <name>VkCommandPoolResetFlags</name>;</type>
         <type requires="VkCommandBufferResetFlagBits"     category="bitmask">typedef <type>VkFlags</type> <name>VkCommandBufferResetFlags</name>;</type>
@@ -268,6 +269,8 @@ typedef void <name>CAMetalLayer</name>;
         <type requires="VkPipelineCompilerControlFlagBitsAMD" category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineCompilerControlFlagsAMD</name>;</type>
         <type requires="VkShaderCorePropertiesFlagBitsAMD" category="bitmask">typedef <type>VkFlags</type> <name>VkShaderCorePropertiesFlagsAMD</name>;</type>
         <type requires="VkDeviceDiagnosticsConfigFlagBitsNV" category="bitmask">typedef <type>VkFlags</type> <name>VkDeviceDiagnosticsConfigFlagsNV</name>;</type>
+        <type bitvalues="VkAccessFlagBits2KHR"             category="bitmask">typedef <type>VkFlags64</type> <name>VkAccessFlags2KHR</name>;</type>
+        <type bitvalues="VkPipelineStageFlagBits2KHR"      category="bitmask">typedef <type>VkFlags64</type> <name>VkPipelineStageFlags2KHR</name>;</type>
 
             <comment>WSI extensions</comment>
         <type requires="VkCompositeAlphaFlagBitsKHR"      category="bitmask">typedef <type>VkFlags</type> <name>VkCompositeAlphaFlagsKHR</name>;</type>
@@ -338,6 +341,7 @@ typedef void <name>CAMetalLayer</name>;
         <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineRasterizationDepthClipStateCreateFlagsEXT</name>;</type>
         <type requires="VkSwapchainImageUsageFlagBitsANDROID" category="bitmask">typedef <type>VkFlags</type> <name>VkSwapchainImageUsageFlagsANDROID</name>;</type>
         <type requires="VkToolPurposeFlagBitsEXT"         category="bitmask">typedef <type>VkFlags</type> <name>VkToolPurposeFlagsEXT</name>;</type>
+        <type requires="VkSubmitFlagBitsKHR"              category="bitmask">typedef <type>VkFlags</type> <name>VkSubmitFlagsKHR</name>;</type>
 
             <comment>Types which can be void pointers or class pointers, selected at compile time</comment>
         <type category="handle"                           objtypeenum="VK_OBJECT_TYPE_INSTANCE"><type>VK_DEFINE_HANDLE</type>(<name>VkInstance</name>)</type>
@@ -463,6 +467,7 @@ typedef void <name>CAMetalLayer</name>;
         <type name="VkDescriptorPoolCreateFlagBits" category="enum"/>
         <type name="VkDependencyFlagBits" category="enum"/>
         <type name="VkObjectType" category="enum"/>
+        <type name="VkEventCreateFlagBits" category="enum"/>
             <comment>When VkSemaphoreCreateFlagBits is first extended, need to add a type enum tag for it here</comment>
 
         <comment>Extensions</comment>
@@ -535,6 +540,8 @@ typedef void <name>CAMetalLayer</name>;
         <type name="VkToolPurposeFlagBitsEXT" category="enum"/>
         <type name="VkFragmentShadingRateNV" category="enum"/>
         <type name="VkFragmentShadingRateTypeNV" category="enum"/>
+        <type name="VkAccessFlagBits2KHR" category="enum"/>
+        <type name="VkPipelineStageFlagBits2KHR" category="enum"/>
 
             <comment>WSI extensions</comment>
         <type name="VkColorSpaceKHR" category="enum"/>
@@ -596,6 +603,7 @@ typedef void <name>CAMetalLayer</name>;
         <type category="enum" name="VkShaderFloatControlsIndependenceKHR"          alias="VkShaderFloatControlsIndependence"/>
         <type name="VkSwapchainImageUsageFlagBitsANDROID" category="enum"/>
         <type name="VkFragmentShadingRateCombinerOpKHR" category="enum"/>
+        <type name="VkSubmitFlagBitsKHR" category="enum"/>
 
             <comment>Enumerated types in the header, but not used by the API</comment>
         <type name="VkVendorId" category="enum"/>
@@ -1382,8 +1390,8 @@ typedef void <name>CAMetalLayer</name>;
         <type category="struct" name="VkSubpassDependency">
             <member><type>uint32_t</type>               <name>srcSubpass</name></member>
             <member><type>uint32_t</type>               <name>dstSubpass</name></member>
-            <member><type>VkPipelineStageFlags</type>   <name>srcStageMask</name></member>
-            <member><type>VkPipelineStageFlags</type>   <name>dstStageMask</name></member>
+            <member optional="true"><type>VkPipelineStageFlags</type>   <name>srcStageMask</name></member>
+            <member optional="true"><type>VkPipelineStageFlags</type>   <name>dstStageMask</name></member>
             <member optional="true"><type>VkAccessFlags</type>          <name>srcAccessMask</name><comment>Memory accesses from the source of the dependency to synchronize</comment></member>
             <member optional="true"><type>VkAccessFlags</type>          <name>dstAccessMask</name><comment>Memory accesses from the destination of the dependency to synchronize</comment></member>
             <member optional="true"><type>VkDependencyFlags</type>      <name>dependencyFlags</name></member>
@@ -1898,7 +1906,7 @@ typedef void <name>CAMetalLayer</name>;
             <member optional="true">const <type>SECURITY_ATTRIBUTES</type>*       <name>pAttributes</name></member>
             <member optional="true"><type>DWORD</type>                            <name>dwAccess</name></member>
         </type>
-        <type category="struct" name="VkWin32KeyedMutexAcquireReleaseInfoNV" structextends="VkSubmitInfo">
+        <type category="struct" name="VkWin32KeyedMutexAcquireReleaseInfoNV" structextends="VkSubmitInfo,VkSubmitInfo2KHR">
             <member values="VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
             <member optional="true">const <type>void</type>*                      <name>pNext</name></member>
             <member optional="true"><type>uint32_t</type>                         <name>acquireCount</name></member>
@@ -2239,7 +2247,7 @@ typedef void <name>CAMetalLayer</name>;
             <member><type>VkDeviceMemory</type>                   <name>memory</name></member>
             <member><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></member>
         </type>
-        <type category="struct" name="VkWin32KeyedMutexAcquireReleaseInfoKHR" structextends="VkSubmitInfo">
+        <type category="struct" name="VkWin32KeyedMutexAcquireReleaseInfoKHR" structextends="VkSubmitInfo,VkSubmitInfo2KHR">
             <member values="VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
             <member optional="true">const <type>void</type>*                      <name>pNext</name></member>
             <member optional="true"><type>uint32_t</type>         <name>acquireCount</name></member>
@@ -2925,7 +2933,7 @@ typedef void <name>CAMetalLayer</name>;
             <member><type>float</type>                            <name>x</name></member>
             <member><type>float</type>                            <name>y</name></member>
         </type>
-        <type category="struct" name="VkSampleLocationsInfoEXT" structextends="VkImageMemoryBarrier">
+        <type category="struct" name="VkSampleLocationsInfoEXT" structextends="VkImageMemoryBarrier,VkImageMemoryBarrier2KHR">
             <member values="VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
             <member optional="true">const <type>void</type>*                            <name>pNext</name></member>
             <member noautovalidity="true"><type>VkSampleCountFlagBits</type>  <name>sampleLocationsPerPixel</name></member>
@@ -3399,8 +3407,8 @@ typedef void <name>CAMetalLayer</name>;
             <member optional="true">const <type>void</type>* <name>pNext</name></member>
             <member><type>uint32_t</type>                          <name>srcSubpass</name></member>
             <member><type>uint32_t</type>                          <name>dstSubpass</name></member>
-            <member><type>VkPipelineStageFlags</type>              <name>srcStageMask</name></member>
-            <member><type>VkPipelineStageFlags</type>              <name>dstStageMask</name></member>
+            <member optional="true"><type>VkPipelineStageFlags</type> <name>srcStageMask</name></member>
+            <member optional="true"><type>VkPipelineStageFlags</type> <name>dstStageMask</name></member>
             <member optional="true"><type>VkAccessFlags</type>     <name>srcAccessMask</name></member>
             <member optional="true"><type>VkAccessFlags</type>     <name>dstAccessMask</name></member>
             <member optional="true"><type>VkDependencyFlags</type> <name>dependencyFlags</name></member>
@@ -4292,7 +4300,7 @@ typedef void <name>CAMetalLayer</name>;
             <member optional="true"><type>VkAcquireProfilingLockFlagsKHR</type> <name>flags</name><comment>Acquire profiling lock flags</comment></member>
             <member><type>uint64_t</type> <name>timeout</name></member>
         </type>
-        <type category="struct" name="VkPerformanceQuerySubmitInfoKHR" structextends="VkSubmitInfo">
+        <type category="struct" name="VkPerformanceQuerySubmitInfoKHR" structextends="VkSubmitInfo,VkSubmitInfo2KHR">
             <member values="VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
             <member optional="true">const <type>void</type>*         <name>pNext</name></member>
             <member><type>uint32_t</type>            <name>counterPassIndex</name><comment>Index for which counter pass to submit</comment></member>
@@ -5148,6 +5156,93 @@ typedef void <name>CAMetalLayer</name>;
             <member optional="true"><type>uint32_t</type>                                                         <name>mutableDescriptorTypeListCount</name></member>
             <member len="mutableDescriptorTypeListCount">const <type>VkMutableDescriptorTypeListVALVE</type>* <name>pMutableDescriptorTypeLists</name></member>
         </type>
+        <type category="struct" name="VkMemoryBarrier2KHR" structextends="VkSubpassDependency2">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                               <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineStageFlags2KHR</type>  <name>srcStageMask</name></member>
+            <member optional="true"><type>VkAccessFlags2KHR</type>         <name>srcAccessMask</name></member>
+            <member optional="true"><type>VkPipelineStageFlags2KHR</type>  <name>dstStageMask</name></member>
+            <member optional="true"><type>VkAccessFlags2KHR</type>         <name>dstAccessMask</name></member>
+        </type>
+        <type category="struct" name="VkImageMemoryBarrier2KHR">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                               <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineStageFlags2KHR</type>  <name>srcStageMask</name></member>
+            <member optional="true"><type>VkAccessFlags2KHR</type>         <name>srcAccessMask</name></member>
+            <member optional="true"><type>VkPipelineStageFlags2KHR</type>  <name>dstStageMask</name></member>
+            <member optional="true"><type>VkAccessFlags2KHR</type>         <name>dstAccessMask</name></member>
+            <member><type>VkImageLayout</type>                             <name>oldLayout</name></member>
+            <member><type>VkImageLayout</type>                             <name>newLayout</name></member>
+            <member><type>uint32_t</type>                                  <name>srcQueueFamilyIndex</name></member>
+            <member><type>uint32_t</type>                                  <name>dstQueueFamilyIndex</name></member>
+            <member><type>VkImage</type>                                   <name>image</name></member>
+            <member><type>VkImageSubresourceRange</type>                   <name>subresourceRange</name></member>
+        </type>
+        <type category="struct" name="VkBufferMemoryBarrier2KHR">
+            <member values="VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                               <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineStageFlags2KHR</type>  <name>srcStageMask</name></member>
+            <member optional="true"><type>VkAccessFlags2KHR</type>         <name>srcAccessMask</name></member>
+            <member optional="true"><type>VkPipelineStageFlags2KHR</type>  <name>dstStageMask</name></member>
+            <member optional="true"><type>VkAccessFlags2KHR</type>         <name>dstAccessMask</name></member>
+            <member><type>uint32_t</type>                                  <name>srcQueueFamilyIndex</name></member>
+            <member><type>uint32_t</type>                                  <name>dstQueueFamilyIndex</name></member>
+            <member><type>VkBuffer</type>                                  <name>buffer</name></member>
+            <member><type>VkDeviceSize</type>                              <name>offset</name></member>
+            <member><type>VkDeviceSize</type>                              <name>size</name></member>
+        </type>
+        <type category="struct" name="VkDependencyInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                               <name>pNext</name></member>
+            <member optional="true"><type>VkDependencyFlags</type>         <name>dependencyFlags</name></member>
+            <member optional="true"><type>uint32_t</type>                  <name>memoryBarrierCount</name></member>
+            <member len="memoryBarrierCount">const <type>VkMemoryBarrier2KHR</type>* <name>pMemoryBarriers</name></member>
+            <member optional="true"><type>uint32_t</type>                  <name>bufferMemoryBarrierCount</name></member>
+            <member len="bufferMemoryBarrierCount">const <type>VkBufferMemoryBarrier2KHR</type>* <name>pBufferMemoryBarriers</name></member>
+            <member optional="true"><type>uint32_t</type>                  <name>imageMemoryBarrierCount</name></member>
+            <member len="imageMemoryBarrierCount">const <type>VkImageMemoryBarrier2KHR</type>* <name>pImageMemoryBarriers</name></member>
+        </type>
+        <type category="struct" name="VkSemaphoreSubmitInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR"><type>VkStructureType</type>       <name>sType</name></member>
+            <member>const <type>void</type>*                                                                <name>pNext</name></member>
+            <member><type>VkSemaphore</type>                                                                <name>semaphore</name></member>
+            <member><type>uint64_t</type>                                                                   <name>value</name></member>
+            <member optional="true"><type>VkPipelineStageFlags2KHR</type>                                   <name>stageMask</name></member>
+            <member><type>uint32_t</type>                                                                   <name>deviceIndex</name></member>
+        </type>
+        <type category="struct" name="VkCommandBufferSubmitInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR"><type>VkStructureType</type>  <name>sType</name></member>
+            <member>const <type>void</type>*                                                                <name>pNext</name></member>
+            <member><type>VkCommandBuffer</type>                                                            <name>commandBuffer</name></member>
+            <member><type>uint32_t</type>                                                                   <name>deviceMask</name></member>
+        </type>
+        <type category="struct" name="VkSubmitInfo2KHR">
+            <member values="VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR"><type>VkStructureType</type>               <name>sType</name></member>
+            <member>const <type>void</type>*                                                                <name>pNext</name></member>
+            <member optional="true"><type>VkSubmitFlagsKHR</type>                                           <name>flags</name></member>
+            <member optional="true"><type>uint32_t</type>                                                   <name>waitSemaphoreInfoCount</name></member>
+            <member len="waitSemaphoreInfoCount">const <type>VkSemaphoreSubmitInfoKHR</type>*               <name>pWaitSemaphoreInfos</name></member>
+            <member optional="true"><type>uint32_t</type>                                                   <name>commandBufferInfoCount</name></member>
+            <member len="commandBufferInfoCount">const <type>VkCommandBufferSubmitInfoKHR</type>*           <name>pCommandBufferInfos</name></member>
+            <member optional="true"><type>uint32_t</type>                                                   <name>signalSemaphoreInfoCount</name></member>
+            <member len="signalSemaphoreInfoCount">const <type>VkSemaphoreSubmitInfoKHR</type>*             <name>pSignalSemaphoreInfos</name></member>
+        </type>
+        <type category="struct" name="VkQueueFamilyCheckpointProperties2NV" structextends="VkQueueFamilyProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*           <name>pNext</name></member>
+            <member><type>VkPipelineStageFlags2KHR</type> <name>checkpointExecutionStageMask</name></member>
+        </type>
+        <type category="struct" name="VkCheckpointData2NV" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>VkPipelineStageFlags2KHR</type>   <name>stage</name></member>
+            <member noautovalidity="true"><type>void</type>* <name>pCheckpointMarker</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceSynchronization2FeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>synchronization2</name></member>
+        </type>
     </types>
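The new structures registered above (VkImageMemoryBarrier2KHR, VkDependencyInfoKHR, VkCommandBufferSubmitInfoKHR, VkSemaphoreSubmitInfoKHR, VkSubmitInfo2KHR) carry the 64-bit stage and access masks directly. A hedged end-to-end sketch, assuming synchronization2 is enabled and the handles already exist; it records a single image barrier with vkCmdPipelineBarrier2KHR (shown in the vulkan.hpp hunk of this commit) and submits with vkQueueSubmit2KHR, the extension's queue-submit entry point, which is not shown in this hunk:

    #include <vulkan/vulkan.h>

    /* Transition `image` from color-attachment writes to shader reads, then submit. */
    static void barrier_and_submit(VkQueue queue, VkCommandBuffer cmd, VkImage image,
                                   VkSemaphore renderDone, VkFence fence)
    {
        VkImageMemoryBarrier2KHR imgBarrier = {0};
        imgBarrier.sType         = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR;
        imgBarrier.srcStageMask  = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR;
        imgBarrier.srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR;
        imgBarrier.dstStageMask  = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
        imgBarrier.dstAccessMask = VK_ACCESS_2_SHADER_READ_BIT_KHR;
        imgBarrier.oldLayout     = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
        imgBarrier.newLayout     = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        imgBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        imgBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        imgBarrier.image         = image;
        imgBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        imgBarrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS;
        imgBarrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS;

        VkDependencyInfoKHR depInfo = {0};
        depInfo.sType                   = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR;
        depInfo.imageMemoryBarrierCount = 1;
        depInfo.pImageMemoryBarriers    = &imgBarrier;
        vkCmdPipelineBarrier2KHR(cmd, &depInfo);   /* cmd assumed already recording */
        vkEndCommandBuffer(cmd);

        VkCommandBufferSubmitInfoKHR cmdInfo = {0};
        cmdInfo.sType         = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR;
        cmdInfo.commandBuffer = cmd;

        VkSemaphoreSubmitInfoKHR signalInfo = {0};
        signalInfo.sType     = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR;
        signalInfo.semaphore = renderDone;
        signalInfo.stageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR;

        VkSubmitInfo2KHR submit = {0};
        submit.sType                    = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR;
        submit.commandBufferInfoCount   = 1;
        submit.pCommandBufferInfos      = &cmdInfo;
        submit.signalSemaphoreInfoCount = 1;
        submit.pSignalSemaphoreInfos    = &signalInfo;
        vkQueueSubmit2KHR(queue, 1, &submit, fence);
    }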
 
     <comment>Vulkan enumerant (token) definitions</comment>
@@ -5780,7 +5875,7 @@ typedef void <name>CAMetalLayer</name>;
     <enums name="VkBufferCreateFlagBits" type="bitmask">
         <enum bitpos="0"    name="VK_BUFFER_CREATE_SPARSE_BINDING_BIT"               comment="Buffer should support sparse backing"/>
         <enum bitpos="1"    name="VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT"             comment="Buffer should support sparse backing with partial residency"/>
-        <enum bitpos="2"    name="VK_BUFFER_CREATE_SPARSE_ALIASED_BIT"               comment="Buffer should support constent data access to physical memory ranges mapped into multiple locations of sparse buffers"/>
+        <enum bitpos="2"    name="VK_BUFFER_CREATE_SPARSE_ALIASED_BIT"               comment="Buffer should support constant data access to physical memory ranges mapped into multiple locations of sparse buffers"/>
     </enums>
     <enums name="VkShaderStageFlagBits" type="bitmask">
         <enum bitpos="0"    name="VK_SHADER_STAGE_VERTEX_BIT"/>
@@ -5805,7 +5900,7 @@ typedef void <name>CAMetalLayer</name>;
     <enums name="VkImageCreateFlagBits" type="bitmask">
         <enum bitpos="0"    name="VK_IMAGE_CREATE_SPARSE_BINDING_BIT"                comment="Image should support sparse backing"/>
         <enum bitpos="1"    name="VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT"              comment="Image should support sparse backing with partial residency"/>
-        <enum bitpos="2"    name="VK_IMAGE_CREATE_SPARSE_ALIASED_BIT"                comment="Image should support constent data access to physical memory ranges mapped into multiple locations of sparse images"/>
+        <enum bitpos="2"    name="VK_IMAGE_CREATE_SPARSE_ALIASED_BIT"                comment="Image should support constant data access to physical memory ranges mapped into multiple locations of sparse images"/>
         <enum bitpos="3"    name="VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT"                comment="Allows image views to have different format than the base image"/>
         <enum bitpos="4"    name="VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT"               comment="Allows creating image views with cube type from the created image"/>
     </enums>
@@ -5813,7 +5908,7 @@ typedef void <name>CAMetalLayer</name>;
     </enums>
     <enums name="VkSamplerCreateFlagBits" type="bitmask">
     </enums>
-    <enums name="VkPipelineCreateFlagBits" type="bitmask">
+    <enums name="VkPipelineCreateFlagBits" type="bitmask" comment="Note that the gap at bitpos 10 is unused, and can be reserved">
         <enum bitpos="0"    name="VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT"/>
         <enum bitpos="1"    name="VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT"/>
         <enum bitpos="2"    name="VK_PIPELINE_CREATE_DERIVATIVE_BIT"/>
@@ -6559,6 +6654,64 @@ typedef void <name>CAMetalLayer</name>;
         <enum value="0"  name="VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV"/>
         <enum value="1"  name="VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV"/>
     </enums>
+    <enums name="VkAccessFlagBits2KHR" type="bitmask" bitwidth="64">
+        <enum value="0"     name="VK_ACCESS_2_NONE_KHR"/>
+        <enum bitpos="0"    name="VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR"/>
+        <enum bitpos="1"    name="VK_ACCESS_2_INDEX_READ_BIT_KHR"/>
+        <enum bitpos="2"    name="VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR"/>
+        <enum bitpos="3"    name="VK_ACCESS_2_UNIFORM_READ_BIT_KHR"/>
+        <enum bitpos="4"    name="VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR"/>
+        <enum bitpos="5"    name="VK_ACCESS_2_SHADER_READ_BIT_KHR"/>
+        <enum bitpos="6"    name="VK_ACCESS_2_SHADER_WRITE_BIT_KHR"/>
+        <enum bitpos="7"    name="VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR"/>
+        <enum bitpos="8"    name="VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR"/>
+        <enum bitpos="9"    name="VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR"/>
+        <enum bitpos="10"   name="VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR"/>
+        <enum bitpos="11"   name="VK_ACCESS_2_TRANSFER_READ_BIT_KHR"/>
+        <enum bitpos="12"   name="VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR"/>
+        <enum bitpos="13"   name="VK_ACCESS_2_HOST_READ_BIT_KHR"/>
+        <enum bitpos="14"   name="VK_ACCESS_2_HOST_WRITE_BIT_KHR"/>
+        <enum bitpos="15"   name="VK_ACCESS_2_MEMORY_READ_BIT_KHR"/>
+        <enum bitpos="16"   name="VK_ACCESS_2_MEMORY_WRITE_BIT_KHR"/>
+        <!-- bitpos 17-31 are specified by extensions to the original VkAccessFlagBits enum -->
+        <enum bitpos="32"   name="VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR"/>
+        <enum bitpos="33"   name="VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR"/>
+        <enum bitpos="34"   name="VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR"/>
+    </enums>
+    <enums name="VkPipelineStageFlagBits2KHR" type="bitmask" bitwidth="64">
+        <enum value="0"    name="VK_PIPELINE_STAGE_2_NONE_KHR"/>
+        <enum bitpos="0"    name="VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR"/>
+        <enum bitpos="1"    name="VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR"/>
+        <enum bitpos="2"    name="VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR"/>
+        <enum bitpos="3"    name="VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR"/>
+        <enum bitpos="4"    name="VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR"/>
+        <enum bitpos="5"    name="VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR"/>
+        <enum bitpos="6"    name="VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR"/>
+        <enum bitpos="7"    name="VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR"/>
+        <enum bitpos="8"    name="VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR"/>
+        <enum bitpos="9"    name="VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR"/>
+        <enum bitpos="10"   name="VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR"/>
+        <enum bitpos="11"   name="VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR"/>
+        <enum bitpos="12"   name="VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR"/>
+        <enum               name="VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR" alias="VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR"/>
+        <enum bitpos="13"   name="VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR"/>
+        <enum bitpos="14"   name="VK_PIPELINE_STAGE_2_HOST_BIT_KHR"/>
+        <enum bitpos="15"   name="VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR"/>
+        <enum bitpos="16"   name="VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR"/>
+        <!-- bitpos 17-31 are specified by extensions to the original VkPipelineStageFlagBits enum -->
+        <enum bitpos="32"   name="VK_PIPELINE_STAGE_2_COPY_BIT_KHR"/>
+        <enum bitpos="33"   name="VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR"/>
+        <enum bitpos="34"   name="VK_PIPELINE_STAGE_2_BLIT_BIT_KHR"/>
+        <enum bitpos="35"   name="VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR"/>
+        <enum bitpos="36"   name="VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR"/>
+        <enum bitpos="37"   name="VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR"/>
+        <enum bitpos="38"   name="VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR"/>
+    </enums>
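Both flag blocks above are declared with bitwidth="64": bits at position 32 and higher (the split copy/resolve/blit/clear stages and the fine-grained shader read/write accesses) only fit in the 64-bit VkPipelineStageFlags2KHR / VkAccessFlags2KHR types, which is why the extension introduces new *2KHR barrier structures rather than reusing the 32-bit masks. A sketch using VkMemoryBarrier2KHR, whose member names are assumed from the extension (only the type name appears in this hunk):

    VkMemoryBarrier2KHR barrier = {
        .sType         = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR,
        .srcStageMask  = VK_PIPELINE_STAGE_2_COPY_BIT_KHR,         /* bitpos 32: needs a 64-bit mask */
        .srcAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR,
        .dstStageMask  = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR,
        .dstAccessMask = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR,  /* bitpos 33 */
    };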
+    <enums name="VkSubmitFlagBitsKHR" type="bitmask">
+        <enum bitpos="0"    name="VK_SUBMIT_PROTECTED_BIT_KHR"/>
+    </enums>
+    <enums name="VkEventCreateFlagBits" type="bitmask">
+    </enums>
 
     <commands comment="Vulkan command definitions">
         <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED,VK_ERROR_LAYER_NOT_PRESENT,VK_ERROR_EXTENSION_NOT_PRESENT,VK_ERROR_INCOMPATIBLE_DRIVER">
@@ -7435,8 +7588,8 @@ typedef void <name>CAMetalLayer</name>;
             <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>uint32_t</type> <name>eventCount</name></param>
             <param len="eventCount">const <type>VkEvent</type>* <name>pEvents</name></param>
-            <param><type>VkPipelineStageFlags</type> <name>srcStageMask</name></param>
-            <param><type>VkPipelineStageFlags</type> <name>dstStageMask</name></param>
+            <param optional="true"><type>VkPipelineStageFlags</type> <name>srcStageMask</name></param>
+            <param optional="true"><type>VkPipelineStageFlags</type> <name>dstStageMask</name></param>
             <param optional="true"><type>uint32_t</type> <name>memoryBarrierCount</name></param>
             <param len="memoryBarrierCount">const <type>VkMemoryBarrier</type>* <name>pMemoryBarriers</name></param>
             <param optional="true"><type>uint32_t</type> <name>bufferMemoryBarrierCount</name></param>
@@ -9306,6 +9459,58 @@ typedef void <name>CAMetalLayer</name>;
             <param optional="true" len="pBuildInfo-&gt;geometryCount">const <type>uint32_t</type>*  <name>pMaxPrimitiveCounts</name></param>
             <param><type>VkAccelerationStructureBuildSizesInfoKHR</type>*           <name>pSizeInfo</name></param>
         </command>
+        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetEvent2KHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type>                   <name>commandBuffer</name></param>
+            <param><type>VkEvent</type>                                             <name>event</name></param>
+            <param>const <type>VkDependencyInfoKHR</type>*                          <name>pDependencyInfo</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdResetEvent2KHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type>                   <name>commandBuffer</name></param>
+            <param><type>VkEvent</type>                                             <name>event</name></param>
+            <param><type>VkPipelineStageFlags2KHR</type>                            <name>stageMask</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdWaitEvents2KHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type>                   <name>commandBuffer</name></param>
+            <param><type>uint32_t</type>                                            <name>eventCount</name></param>
+            <param len="eventCount">const <type>VkEvent</type>*                     <name>pEvents</name></param>
+            <param len="eventCount">const <type>VkDependencyInfoKHR</type>*         <name>pDependencyInfos</name></param>
+        </command>
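vkCmdSetEvent2KHR and vkCmdWaitEvents2KHR above both take VkDependencyInfoKHR, so the same dependency description is recorded on the signalling and the waiting side, replacing the duplicated stage/access parameters of vkCmdWaitEvents. A sketch, assuming commandBuffer and event handles and taking the VkDependencyInfoKHR member names from the extension (they are not part of this hunk):

    VkMemoryBarrier2KHR barrier = {
        .sType         = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR,
        .srcStageMask  = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR,
        .srcAccessMask = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR,
        .dstStageMask  = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR,
        .dstAccessMask = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR,
    };
    VkDependencyInfoKHR depInfo = {
        .sType              = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
        .memoryBarrierCount = 1,
        .pMemoryBarriers    = &barrier,
    };
    vkCmdSetEvent2KHR(commandBuffer, event, &depInfo);       /* after the producing work */
    /* ... unrelated commands ... */
    vkCmdWaitEvents2KHR(commandBuffer, 1, &event, &depInfo); /* one VkDependencyInfoKHR per event */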
+        <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdPipelineBarrier2KHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type>                   <name>commandBuffer</name></param>
+            <param>const <type>VkDependencyInfoKHR</type>*                          <name>pDependencyInfo</name></param>
+        </command>
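vkCmdPipelineBarrier2KHR folds every parameter of the original vkCmdPipelineBarrier into a single VkDependencyInfoKHR. A hedged sketch of an image layout transition using the generalized layouts added later in this hunk (VkImageMemoryBarrier2KHR member names are assumed; image and commandBuffer exist elsewhere):

    VkImageMemoryBarrier2KHR imageBarrier = {
        .sType               = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR,
        .srcStageMask        = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR,
        .srcAccessMask       = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR,
        .dstStageMask        = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR,
        .dstAccessMask       = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR,
        .oldLayout           = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR,  /* layout added by this extension */
        .newLayout           = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .image               = image,
        .subresourceRange    = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 },
    };
    VkDependencyInfoKHR depInfo = {
        .sType                   = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
        .imageMemoryBarrierCount = 1,
        .pImageMemoryBarriers    = &imageBarrier,
    };
    vkCmdPipelineBarrier2KHR(commandBuffer, &depInfo);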
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkQueueSubmit2KHR</name></proto>
+            <param externsync="true"><type>VkQueue</type>                           <name>queue</name></param>
+            <param optional="true"><type>uint32_t</type>                            <name>submitCount</name></param>
+            <param len="submitCount">const <type>VkSubmitInfo2KHR</type>*           <name>pSubmits</name></param>
+            <param optional="true" externsync="true"><type>VkFence</type>           <name>fence</name></param>
+        </command>
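vkQueueSubmit2KHR consumes the VkSubmitInfo2KHR structure defined earlier in this hunk; each command buffer and semaphore gets its own per-entry info structure. Sketch with assumed member names for VkCommandBufferSubmitInfoKHR / VkSemaphoreSubmitInfoKHR and assumed handles (queue, commandBuffer, renderDoneSemaphore, fence):

    VkCommandBufferSubmitInfoKHR cmdInfo = {
        .sType         = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR,
        .commandBuffer = commandBuffer,
    };
    VkSemaphoreSubmitInfoKHR signalInfo = {
        .sType     = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
        .semaphore = renderDoneSemaphore,
        .stageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR,
    };
    VkSubmitInfo2KHR submit = {
        .sType                    = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR,
        .commandBufferInfoCount   = 1,
        .pCommandBufferInfos      = &cmdInfo,
        .signalSemaphoreInfoCount = 1,
        .pSignalSemaphoreInfos    = &signalInfo,
    };
    VkResult result = vkQueueSubmit2KHR(queue, 1, &submit, fence);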
+        <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdWriteTimestamp2KHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type>                   <name>commandBuffer</name></param>
+            <param><type>VkPipelineStageFlags2KHR</type>                            <name>stage</name></param>
+            <param><type>VkQueryPool</type>                                         <name>queryPool</name></param>
+            <param><type>uint32_t</type>                                            <name>query</name></param>
+        </command>
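vkCmdWriteTimestamp2KHR takes a single 64-bit stage value in place of a VkPipelineStageFlagBits stage. A small sketch, assuming timestampPool is a VK_QUERY_TYPE_TIMESTAMP pool with at least two queries:

    vkCmdResetQueryPool(commandBuffer, timestampPool, 0, 2);
    vkCmdWriteTimestamp2KHR(commandBuffer, VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, timestampPool, 0);
    /* ... commands being timed ... */
    vkCmdWriteTimestamp2KHR(commandBuffer, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, timestampPool, 1);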
+        <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdWriteBufferMarker2AMD</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type>                   <name>commandBuffer</name></param>
+            <param><type>VkPipelineStageFlags2KHR</type>                            <name>stage</name></param>
+            <param><type>VkBuffer</type>                                            <name>dstBuffer</name></param>
+            <param><type>VkDeviceSize</type>                                        <name>dstOffset</name></param>
+            <param><type>uint32_t</type>                                            <name>marker</name></param>
+        </command>
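The AMD buffer-marker command follows the same pattern with a 64-bit stage value; markerBuffer below is an assumed VkBuffer created with transfer-destination usage:

    vkCmdWriteBufferMarker2AMD(commandBuffer, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR,
                               markerBuffer, 0 /* dstOffset */, 0xDEADBEEFu /* marker */);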
+        <command>
+            <proto><type>void</type> <name>vkGetQueueCheckpointData2NV</name></proto>
+            <param><type>VkQueue</type> <name>queue</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pCheckpointDataCount</name></param>
+            <param optional="true" len="pCheckpointDataCount"><type>VkCheckpointData2NV</type>* <name>pCheckpointData</name></param>
+        </command>
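The optional="false,true" count parameter and len="pCheckpointDataCount" array above describe the usual two-call enumeration idiom. A sketch using a fixed-size scratch array to stay self-contained:

    uint32_t checkpointCount = 0;
    vkGetQueueCheckpointData2NV(queue, &checkpointCount, NULL);       /* first call: query the count */
    if (checkpointCount > 16)
        checkpointCount = 16;                                          /* clamp for the fixed array below */
    VkCheckpointData2NV checkpoints[16];
    for (uint32_t i = 0; i < checkpointCount; ++i) {
        checkpoints[i].sType = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV;
        checkpoints[i].pNext = NULL;
    }
    vkGetQueueCheckpointData2NV(queue, &checkpointCount, checkpoints); /* second call: fill the array */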
     </commands>
 
     <feature api="vulkan" name="VK_VERSION_1_0" number="1.0" comment="Vulkan core API interface definitions">
@@ -9509,6 +9714,7 @@ typedef void <name>CAMetalLayer</name>;
         <require comment="Event commands">
             <type name="VkEvent"/>
             <type name="VkEventCreateFlags"/>
+            <type name="VkEventCreateFlagBits"/>
             <type name="VkEventCreateInfo"/>
             <command name="vkCreateEvent"/>
             <command name="vkDestroyEvent"/>
@@ -14164,10 +14370,101 @@ typedef void <name>CAMetalLayer</name>;
                 <enum value="&quot;VK_AMD_extension_314&quot;"              name="VK_AMD_EXTENSION_314_EXTENSION_NAME"/>
             </require>
         </extension>
-        <extension name="VK_AMD_extension_315" number="315" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
-            <require>
-                <enum value="0"                                             name="VK_AMD_EXTENSION_315_SPEC_VERSION"/>
-                <enum value="&quot;VK_AMD_extension_315&quot;"              name="VK_AMD_EXTENSION_315_EXTENSION_NAME"/>
+        <extension name="VK_KHR_synchronization2" number="315" type="device" author="KHR" requires="VK_KHR_get_physical_device_properties2,VK_KHR_create_renderpass2" contact="Tobias Hector @tobski" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_synchronization2&quot;"           name="VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME"/>
+                <enum offset="0"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR"/>
+                <enum offset="1"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR"/>
+                <enum offset="2"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR"/>
+                <enum offset="3"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR"/>
+                <enum offset="4"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR"/>
+                <enum offset="5"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR"/>
+                <enum offset="6"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR"/>
+                <enum offset="7"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR"/>
+                <enum bitpos="0"  extends="VkEventCreateFlagBits"           name="VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR"/>
+                <enum offset="0"  extends="VkImageLayout"                   name="VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR"/>
+                <enum offset="1"  extends="VkImageLayout"                   name="VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR"/>
+                <enum value="0"  extends="VkPipelineStageFlagBits"          name="VK_PIPELINE_STAGE_NONE_KHR"/>
+                <enum value="0"  extends="VkAccessFlagBits"                 name="VK_ACCESS_NONE_KHR"/>
+                <type name="VkPipelineStageFlags2KHR"/>
+                <type name="VkPipelineStageFlagBits2KHR"/>
+                <type name="VkAccessFlags2KHR"/>
+                <type name="VkAccessFlagBits2KHR"/>
+                <type name="VkMemoryBarrier2KHR"/>
+                <type name="VkBufferMemoryBarrier2KHR"/>
+                <type name="VkImageMemoryBarrier2KHR"/>
+                <type name="VkDependencyInfoKHR"/>
+                <type name="VkSubmitInfo2KHR"/>
+                <type name="VkSemaphoreSubmitInfoKHR"/>
+                <type name="VkCommandBufferSubmitInfoKHR"/>
+                <type name="VkSubmitFlagBitsKHR"/>
+                <type name="VkSubmitFlagsKHR"/>
+                <type name="VkPhysicalDeviceSynchronization2FeaturesKHR"/>
+                <command name="vkCmdSetEvent2KHR"/>
+                <command name="vkCmdResetEvent2KHR"/>
+                <command name="vkCmdWaitEvents2KHR"/>
+                <command name="vkCmdPipelineBarrier2KHR"/>
+                <command name="vkCmdWriteTimestamp2KHR"/>
+                <command name="vkQueueSubmit2KHR"/>
+            </require>
+            <require extension="VK_EXT_transform_feedback">
+                <enum bitpos="24" extends="VkPipelineStageFlagBits2KHR"     name="VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT"/>
+                <enum bitpos="25" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT"/>
+                <enum bitpos="26" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT"/>
+                <enum bitpos="27" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT"/>
+            </require>
+            <require extension="VK_EXT_conditional_rendering">
+                <enum bitpos="18" extends="VkPipelineStageFlagBits2KHR"     name="VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" comment="A pipeline stage for conditional rendering predicate fetch"/>
+                <enum bitpos="20" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT"  comment="read access flag for reading conditional rendering predicate"/>
+            </require>
+            <require extension="VK_NV_device_generated_commands">
+                <enum bitpos="17" extends="VkPipelineStageFlagBits2KHR"     name="VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV"/>
+                <enum bitpos="17" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV"/>
+                <enum bitpos="18" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV"/>
+            </require>
+            <require extension="VK_KHR_fragment_shading_rate">
+                <enum bitpos="22" extends="VkPipelineStageFlagBits2KHR"     name="VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"/>
+                <enum bitpos="23" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR"/>
+            </require>
+            <require extension="VK_NV_shading_rate_image">
+                <enum extends="VkPipelineStageFlagBits2KHR"                 name="VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" alias="VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"/>
+                <enum extends="VkAccessFlagBits2KHR"                        name="VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV"    alias="VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR"/>
+            </require>
+            <require extension="VK_KHR_acceleration_structure">
+                <enum bitpos="25" extends="VkPipelineStageFlagBits2KHR"     name="VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR"/>
+                <enum bitpos="21" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR"/>
+                <enum bitpos="22" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR"/>
+            </require>
+            <require extension="VK_KHR_ray_tracing_pipeline">
+                <enum bitpos="21" extends="VkPipelineStageFlagBits2KHR"     name="VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR"/>
+            </require>
+            <require extension="VK_NV_ray_tracing">
+                <enum extends="VkPipelineStageFlagBits2KHR"                 name="VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV" alias="VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR"/>
+                <enum extends="VkPipelineStageFlagBits2KHR"                 name="VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV" alias="VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR"/>
+                <enum extends="VkAccessFlagBits2KHR"                        name="VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV" alias="VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR"/>
+                <enum extends="VkAccessFlagBits2KHR"                        name="VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV" alias="VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR"/>
+            </require>
+            <require extension="VK_EXT_fragment_density_map">
+                <enum bitpos="23" extends="VkPipelineStageFlagBits2KHR"     name="VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT"/>
+                <enum bitpos="24" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT"/>
+            </require>
+            <require extension="VK_EXT_blend_operation_advanced">
+                <enum bitpos="19" extends="VkAccessFlagBits2KHR"            name="VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT"/>
+            </require>
+            <require extension="VK_NV_mesh_shader">
+                <enum bitpos="19" extends="VkPipelineStageFlagBits2KHR"     name="VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV"/>
+                <enum bitpos="20" extends="VkPipelineStageFlagBits2KHR"     name="VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV"/>
+            </require>
+            <require extension="VK_AMD_buffer_marker">
+                <command name="vkCmdWriteBufferMarker2AMD"/>
+            </require>
+            <require extension="VK_NV_device_diagnostic_checkpoints">
+                <type name="VkQueueFamilyCheckpointProperties2NV"/>
+                <type name="VkCheckpointData2NV"/>
+                <command name="vkGetQueueCheckpointData2NV"/>
+                <enum offset="8" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV"/>
+                <enum offset="9" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV"/>
             </require>
         </extension>
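Because VK_KHR_synchronization2 is a device extension, its entry points are typically fetched with vkGetDeviceProcAddr after creating a device that enables it; the PFN_* typedefs are generated from the command definitions above. Sketch:

    PFN_vkCmdPipelineBarrier2KHR pfnCmdPipelineBarrier2KHR =
        (PFN_vkCmdPipelineBarrier2KHR)vkGetDeviceProcAddr(device, "vkCmdPipelineBarrier2KHR");
    PFN_vkQueueSubmit2KHR pfnQueueSubmit2KHR =
        (PFN_vkQueueSubmit2KHR)vkGetDeviceProcAddr(device, "vkQueueSubmit2KHR");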
         <extension name="VK_AMD_extension_316" number="316" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
@@ -14259,6 +14556,7 @@ typedef void <name>CAMetalLayer</name>;
                 <enum value="&quot;VK_NV_extension_328&quot;"               name="VK_NV_EXTENSION_328_EXTENSION_NAME"/>
                 <enum bitpos="5" extends="VkBuildAccelerationStructureFlagBitsKHR" name="VK_BUILD_ACCELERATION_STRUCTURE_RESERVED_BIT_5_NV"/>
                 <enum bitpos="2" extends="VkAccelerationStructureCreateFlagBitsKHR" name="VK_ACCELERATION_STRUCTURE_CREATE_RESERVED_BIT_2_NV"/>
+                <enum bitpos="20"  extends="VkPipelineCreateFlagBits"              name="VK_PIPELINE_CREATE_RESERVED_BIT_20_NV"/>
             </require>
         </extension>
         <extension name="VK_NV_extension_329" number="329" author="NV" contact="Pat Brown @nvpbrown" supported="disabled">