From 26341b4bf0bfe481de1acb7020cb09ddb24e6225 Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Tue, 17 Mar 2020 05:02:49 -0700 Subject: [PATCH] Update for Vulkan-Docs 1.2.135 --- include/vulkan/vulkan.h | 5 + include/vulkan/vulkan.hpp | 9316 +++++++++++++++++++++++++++++++----------- include/vulkan/vulkan_core.h | 948 +++-- registry/generator.py | 7 +- registry/genvk.py | 11 +- registry/reg.py | 1 + registry/validusage.json | 3736 +++++++++++++---- registry/vk.xml | 1391 +++++-- 8 files changed, 11400 insertions(+), 4015 deletions(-) diff --git a/include/vulkan/vulkan.h b/include/vulkan/vulkan.h index ee3fd3c..20ecd10 100644 --- a/include/vulkan/vulkan.h +++ b/include/vulkan/vulkan.h @@ -83,4 +83,9 @@ #include "vulkan_ggp.h" #endif + +#ifdef VK_ENABLE_BETA_EXTENSIONS +#include "vulkan_beta.h" +#endif + #endif // VULKAN_H_ diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp index 2709dac..0891717 100644 --- a/include/vulkan/vulkan.hpp +++ b/include/vulkan/vulkan.hpp @@ -81,7 +81,7 @@ # include #endif -static_assert( VK_HEADER_VERSION == 134 , "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 135 , "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default. // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION @@ -861,6 +861,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); } + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); + } + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); @@ -881,7 +886,21 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); } - void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdBuildAccelerationStructureIndirectKHR( VkCommandBuffer commandBuffer, const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, VkBuffer indirectBuffer, VkDeviceSize indirectOffset, uint32_t indirectStride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureIndirectKHR( commandBuffer, pInfo, indirectBuffer, indirectOffset, indirectStride ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdBuildAccelerationStructureKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureKHR( commandBuffer, infoCount, pInfos, ppOffsetInfos ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize 
instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); } @@ -901,11 +920,25 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); } - void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode ) const VULKAN_HPP_NOEXCEPT +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); @@ -926,6 +959,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); @@ -1081,6 +1121,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); } + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); @@ -1111,9 +1156,9 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, 
pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers ); } - void vkCmdProcessCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdProcessCommandsNVX( commandBuffer, pProcessCommandsInfo ); + return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); } void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const VULKAN_HPP_NOEXCEPT @@ -1131,11 +1176,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); } - void vkCmdReserveSpaceForCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdReserveSpaceForCommandsNVX( commandBuffer, pReserveSpaceInfo ); - } - void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdResetEvent( commandBuffer, event, stageMask ); @@ -1266,6 +1306,20 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirectKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, buffer, offset ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, 
hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth ); @@ -1281,7 +1335,12 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers ); } - void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); } @@ -1348,7 +1407,12 @@ namespace VULKAN_HPP_NAMESPACE return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); } - VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos ) const VULKAN_HPP_NOEXCEPT + VkResult vkBindAccelerationStructureMemoryKHR( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindAccelerationStructureMemoryKHR( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); } @@ -1383,11 +1447,46 @@ namespace VULKAN_HPP_NAMESPACE return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkBuildAccelerationStructureKHR( VkDevice device, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildAccelerationStructureKHR( device, infoCount, pInfos, ppOffsetInfos ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT { return ::vkCompileDeferredNV( device, pipeline, shader ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCopyAccelerationStructureKHR( VkDevice device, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureKHR( device, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice 
device, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureToMemoryKHR( device, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToAccelerationStructureKHR( device, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); @@ -1413,6 +1512,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); @@ -1463,14 +1569,9 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); } - VkResult vkCreateIndirectCommandsLayoutNVX( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateIndirectCommandsLayoutNVX( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); - } - - VkResult vkCreateObjectTableNVX( VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateObjectTableNVX( device, pCreateInfo, pAllocator, pObjectTable ); + return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); } VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const VULKAN_HPP_NOEXCEPT @@ -1488,6 +1589,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult 
vkCreateRayTracingPipelinesKHR( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesKHR( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); @@ -1558,7 +1666,19 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); } - void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeferredOperationJoinKHR( device, operation ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + void vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); + } + + void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); } @@ -1578,6 +1698,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyCommandPool( device, commandPool, pAllocator ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); @@ -1628,14 +1755,9 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyImageView( device, imageView, pAllocator ); } - void vkDestroyIndirectCommandsLayoutNVX( VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyIndirectCommandsLayoutNVX( device, indirectCommandsLayout, pAllocator ); - } - - void vkDestroyObjectTableNVX( VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyObjectTableNVX( device, objectTable, pAllocator ); + return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); } void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator 
) const VULKAN_HPP_NOEXCEPT @@ -1728,11 +1850,25 @@ namespace VULKAN_HPP_NAMESPACE return ::vkFreeMemory( device, memory, pAllocator ); } - VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkGetAccelerationStructureMemoryRequirementsKHR( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); @@ -1790,6 +1926,20 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationResultKHR( device, operation ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); @@ -1800,6 +1950,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionKHR* version ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, version ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); @@ -1879,6 +2036,11 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* 
pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); @@ -2002,6 +2164,18 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); @@ -2123,11 +2297,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); } - VkResult vkRegisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterObjectsNVX( device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices ); - } - #ifdef VK_USE_PLATFORM_WIN32_KHR VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT { @@ -2230,11 +2399,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkUnmapMemory( device, memory ); } - VkResult vkUnregisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUnregisterObjectsNVX( device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices ); - } - void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT { return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); @@ -2265,6 +2429,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + } +#endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_USE_PLATFORM_ANDROID_KHR VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT { @@ -2564,11 +2735,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); } - void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( physicalDevice, pFeatures, pLimits ); - } - VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); @@ -3038,36 +3204,58 @@ namespace VULKAN_HPP_NAMESPACE using DeviceSize = uint64_t; using SampleMask = uint32_t; - enum class AccelerationStructureMemoryRequirementsTypeNV +#ifdef VK_ENABLE_BETA_EXTENSIONS + enum class AccelerationStructureBuildTypeKHR { - eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, - eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, - eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV + eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR, + eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, + eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR }; - VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value ) + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value ) { switch ( value ) { - case AccelerationStructureMemoryRequirementsTypeNV::eObject : return "Object"; - case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch : return "BuildScratch"; - case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch : return "UpdateScratch"; + case AccelerationStructureBuildTypeKHR::eHost : return "Host"; + case AccelerationStructureBuildTypeKHR::eDevice : return "Device"; + case AccelerationStructureBuildTypeKHR::eHostOrDevice : return "HostOrDevice"; default: return "invalid"; } } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - enum class AccelerationStructureTypeNV + enum class AccelerationStructureMemoryRequirementsTypeKHR { - eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, - eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV + eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR, + eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR, + eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR }; + using AccelerationStructureMemoryRequirementsTypeNV = AccelerationStructureMemoryRequirementsTypeKHR; - VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeNV value ) + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeKHR value ) { switch ( value ) { - case AccelerationStructureTypeNV::eTopLevel : return "TopLevel"; - case AccelerationStructureTypeNV::eBottomLevel : return "BottomLevel"; + case 
AccelerationStructureMemoryRequirementsTypeKHR::eObject : return "Object"; + case AccelerationStructureMemoryRequirementsTypeKHR::eBuildScratch : return "BuildScratch"; + case AccelerationStructureMemoryRequirementsTypeKHR::eUpdateScratch : return "UpdateScratch"; + default: return "invalid"; + } + } + + enum class AccelerationStructureTypeKHR + { + eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR + }; + using AccelerationStructureTypeNV = AccelerationStructureTypeKHR; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value ) + { + switch ( value ) + { + case AccelerationStructureTypeKHR::eTopLevel : return "TopLevel"; + case AccelerationStructureTypeKHR::eBottomLevel : return "BottomLevel"; default: return "invalid"; } } @@ -3095,13 +3283,15 @@ namespace VULKAN_HPP_NAMESPACE eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, - eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX, - eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX, eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, + eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, - eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, - eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value ) @@ -3129,13 +3319,13 @@ namespace VULKAN_HPP_NAMESPACE case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT"; case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT"; case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT"; - case AccessFlagBits::eCommandProcessReadNVX : return "CommandProcessReadNVX"; - case AccessFlagBits::eCommandProcessWriteNVX : return "CommandProcessWriteNVX"; case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits::eAccelerationStructureReadKHR : return "AccelerationStructureReadKHR"; + case AccessFlagBits::eAccelerationStructureWriteKHR : return "AccelerationStructureWriteKHR"; case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV"; - case AccessFlagBits::eAccelerationStructureReadNV : return "AccelerationStructureReadNV"; - case AccessFlagBits::eAccelerationStructureWriteNV : return "AccelerationStructureWriteNV"; case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT"; + case AccessFlagBits::eCommandPreprocessReadNV : return "CommandPreprocessReadNV"; + case AccessFlagBits::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV"; default: return "invalid"; } } @@ -3441,6 
+3631,7 @@ namespace VULKAN_HPP_NAMESPACE eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, + eRayTracingKHR = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR, eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR @@ -3463,7 +3654,7 @@ namespace VULKAN_HPP_NAMESPACE case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT"; case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT"; case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; - case BufferUsageFlagBits::eRayTracingNV : return "RayTracingNV"; + case BufferUsageFlagBits::eRayTracingKHR : return "RayTracingKHR"; default: return "invalid"; } } @@ -3476,24 +3667,25 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - enum class BuildAccelerationStructureFlagBitsNV : VkBuildAccelerationStructureFlagsNV + enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR { - eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV, - eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV, - ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV, - ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV, - eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV + eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR }; + using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR; - VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsNV value ) + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value ) { switch ( value ) { - case BuildAccelerationStructureFlagBitsNV::eAllowUpdate : return "AllowUpdate"; - case BuildAccelerationStructureFlagBitsNV::eAllowCompaction : return "AllowCompaction"; - case BuildAccelerationStructureFlagBitsNV::ePreferFastTrace : return "PreferFastTrace"; - case BuildAccelerationStructureFlagBitsNV::ePreferFastBuild : return "PreferFastBuild"; - case BuildAccelerationStructureFlagBitsNV::eLowMemory : return "LowMemory"; + case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate : return "AllowUpdate"; + case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction : return "AllowCompaction"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace : return "PreferFastTrace"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild : return "PreferFastBuild"; + case BuildAccelerationStructureFlagBitsKHR::eLowMemory : return "LowMemory"; default: return "invalid"; } } @@ -3821,18 +4013,23 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class CopyAccelerationStructureModeNV + enum class CopyAccelerationStructureModeKHR { - eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, - eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV + eClone = 
VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR, + eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR }; + using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR; - VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeNV value ) + VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value ) { switch ( value ) { - case CopyAccelerationStructureModeNV::eClone : return "Clone"; - case CopyAccelerationStructureModeNV::eCompact : return "Compact"; + case CopyAccelerationStructureModeKHR::eClone : return "Clone"; + case CopyAccelerationStructureModeKHR::eCompact : return "Compact"; + case CopyAccelerationStructureModeKHR::eSerialize : return "Serialize"; + case CopyAccelerationStructureModeKHR::eDeserialize : return "Deserialize"; default: return "invalid"; } } @@ -3948,16 +4145,15 @@ namespace VULKAN_HPP_NAMESPACE eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, - eObjectTableNVX = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, - eIndirectCommandsLayoutNVX = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, - eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, + eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, - eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT + eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, + eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT }; VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value ) @@ -3995,12 +4191,10 @@ namespace VULKAN_HPP_NAMESPACE case DebugReportObjectTypeEXT::eDebugReportCallbackEXT : return "DebugReportCallbackEXT"; case DebugReportObjectTypeEXT::eDisplayKHR : return "DisplayKHR"; case DebugReportObjectTypeEXT::eDisplayModeKHR : return "DisplayModeKHR"; - case DebugReportObjectTypeEXT::eObjectTableNVX : return "ObjectTableNVX"; - case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX"; case DebugReportObjectTypeEXT::eValidationCacheEXT : return "ValidationCacheEXT"; case DebugReportObjectTypeEXT::eSamplerYcbcrConversion : return "SamplerYcbcrConversion"; case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate"; - case DebugReportObjectTypeEXT::eAccelerationStructureNV : return "AccelerationStructureNV"; + case DebugReportObjectTypeEXT::eAccelerationStructureKHR : return "AccelerationStructureKHR"; default: return "invalid"; } } @@ -4132,6 +4326,7 @@ namespace VULKAN_HPP_NAMESPACE eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, eInlineUniformBlockEXT = 
VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, + eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV }; @@ -4151,7 +4346,7 @@ namespace VULKAN_HPP_NAMESPACE case DescriptorType::eStorageBufferDynamic : return "StorageBufferDynamic"; case DescriptorType::eInputAttachment : return "InputAttachment"; case DescriptorType::eInlineUniformBlockEXT : return "InlineUniformBlockEXT"; - case DescriptorType::eAccelerationStructureNV : return "AccelerationStructureNV"; + case DescriptorType::eAccelerationStructureKHR : return "AccelerationStructureKHR"; default: return "invalid"; } } @@ -4181,6 +4376,24 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } + enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV + { + eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV, + eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV, + eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value ) + { + switch ( value ) + { + case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo : return "EnableShaderDebugInfo"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking : return "EnableResourceTracking"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints : return "EnableAutomaticCheckpoints"; + default: return "invalid"; + } + } + enum class DeviceEventTypeEXT { eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT @@ -5151,6 +5364,7 @@ namespace VULKAN_HPP_NAMESPACE eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, + eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR, eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, @@ -5193,6 +5407,7 @@ namespace VULKAN_HPP_NAMESPACE case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples"; case FormatFeatureFlagBits::eSampledImageFilterMinmax : return "SampledImageFilterMinmax"; case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG"; + case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR : return "AccelerationStructureVertexBufferKHR"; case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT"; default: return "invalid"; } @@ -5251,54 +5466,60 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - enum class GeometryFlagBitsNV : VkGeometryFlagsNV + enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR { - eOpaque = VK_GEOMETRY_OPAQUE_BIT_NV, - eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV + eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR, + eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR }; + using GeometryFlagBitsNV = GeometryFlagBitsKHR; - VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsNV value ) + VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value ) { switch ( value ) { - case GeometryFlagBitsNV::eOpaque : return "Opaque"; - case 
GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation"; + case GeometryFlagBitsKHR::eOpaque : return "Opaque"; + case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation"; default: return "invalid"; } } - enum class GeometryInstanceFlagBitsNV : VkGeometryInstanceFlagsNV + enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR { - eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV, - eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV, - eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV, - eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV + eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV }; + using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR; - VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsNV value ) + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value ) { switch ( value ) { - case GeometryInstanceFlagBitsNV::eTriangleCullDisable : return "TriangleCullDisable"; - case GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise"; - case GeometryInstanceFlagBitsNV::eForceOpaque : return "ForceOpaque"; - case GeometryInstanceFlagBitsNV::eForceNoOpaque : return "ForceNoOpaque"; + case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable : return "TriangleFacingCullDisable"; + case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise"; + case GeometryInstanceFlagBitsKHR::eForceOpaque : return "ForceOpaque"; + case GeometryInstanceFlagBitsKHR::eForceNoOpaque : return "ForceNoOpaque"; default: return "invalid"; } } - enum class GeometryTypeNV + enum class GeometryTypeKHR { - eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_NV, - eAabbs = VK_GEOMETRY_TYPE_AABBS_NV + eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR, + eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR }; + using GeometryTypeNV = GeometryTypeKHR; - VULKAN_HPP_INLINE std::string to_string( GeometryTypeNV value ) + VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value ) { switch ( value ) { - case GeometryTypeNV::eTriangles : return "Triangles"; - case GeometryTypeNV::eAabbs : return "Aabbs"; + case GeometryTypeKHR::eTriangles : return "Triangles"; + case GeometryTypeKHR::eAabbs : return "Aabbs"; + case GeometryTypeKHR::eInstances : return "Instances"; default: return "invalid"; } } @@ -5556,8 +5777,9 @@ namespace VULKAN_HPP_NAMESPACE { eUint16 = VK_INDEX_TYPE_UINT16, eUint32 = VK_INDEX_TYPE_UINT32, - eNoneNV = VK_INDEX_TYPE_NONE_NV, - eUint8EXT = VK_INDEX_TYPE_UINT8_EXT + eNoneKHR = VK_INDEX_TYPE_NONE_KHR, + eUint8EXT = VK_INDEX_TYPE_UINT8_EXT, + eNoneNV = VK_INDEX_TYPE_NONE_NV }; VULKAN_HPP_INLINE std::string to_string( IndexType value ) @@ -5566,56 +5788,68 @@ namespace VULKAN_HPP_NAMESPACE { case IndexType::eUint16 : return "Uint16"; case IndexType::eUint32 : return "Uint32"; - case IndexType::eNoneNV : return "NoneNV"; + case IndexType::eNoneKHR : return "NoneKHR"; case IndexType::eUint8EXT : return "Uint8EXT"; default: return "invalid"; } } - enum class 
IndirectCommandsLayoutUsageFlagBitsNVX : VkIndirectCommandsLayoutUsageFlagsNVX + enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV { - eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX, - eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX, - eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX, - eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX + eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV, + eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV, + eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV }; - VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNVX value ) + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value ) { switch ( value ) { - case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences : return "UnorderedSequences"; - case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences : return "SparseSequences"; - case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions : return "EmptyExecutions"; - case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences : return "IndexedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess : return "ExplicitPreprocess"; + case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences : return "IndexedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences : return "UnorderedSequences"; default: return "invalid"; } } - enum class IndirectCommandsTokenTypeNVX + enum class IndirectCommandsTokenTypeNV { - ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, - eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX, - eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX, - eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX, - ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX, - eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, - eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, - eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX + eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, + eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, + eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV }; - VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNVX value ) + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value ) { switch ( value ) { - case IndirectCommandsTokenTypeNVX::ePipeline : return "Pipeline"; - case IndirectCommandsTokenTypeNVX::eDescriptorSet : return "DescriptorSet"; - case IndirectCommandsTokenTypeNVX::eIndexBuffer : return "IndexBuffer"; - case IndirectCommandsTokenTypeNVX::eVertexBuffer : return "VertexBuffer"; - case IndirectCommandsTokenTypeNVX::ePushConstant : return "PushConstant"; - case IndirectCommandsTokenTypeNVX::eDrawIndexed : return "DrawIndexed"; - case IndirectCommandsTokenTypeNVX::eDraw : return "Draw"; - case IndirectCommandsTokenTypeNVX::eDispatch : return "Dispatch"; + case 
IndirectCommandsTokenTypeNV::eShaderGroup : return "ShaderGroup"; + case IndirectCommandsTokenTypeNV::eStateFlags : return "StateFlags"; + case IndirectCommandsTokenTypeNV::eIndexBuffer : return "IndexBuffer"; + case IndirectCommandsTokenTypeNV::eVertexBuffer : return "VertexBuffer"; + case IndirectCommandsTokenTypeNV::ePushConstant : return "PushConstant"; + case IndirectCommandsTokenTypeNV::eDrawIndexed : return "DrawIndexed"; + case IndirectCommandsTokenTypeNV::eDraw : return "Draw"; + case IndirectCommandsTokenTypeNV::eDrawTasks : return "DrawTasks"; + default: return "invalid"; + } + } + + enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV + { + eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value ) + { + switch ( value ) + { + case IndirectStateFlagBitsNV::eFlagFrontface : return "FlagFrontface"; default: return "invalid"; } } @@ -5788,44 +6022,6 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ObjectEntryTypeNVX - { - eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, - ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX, - eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX, - eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX, - ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - }; - - VULKAN_HPP_INLINE std::string to_string( ObjectEntryTypeNVX value ) - { - switch ( value ) - { - case ObjectEntryTypeNVX::eDescriptorSet : return "DescriptorSet"; - case ObjectEntryTypeNVX::ePipeline : return "Pipeline"; - case ObjectEntryTypeNVX::eIndexBuffer : return "IndexBuffer"; - case ObjectEntryTypeNVX::eVertexBuffer : return "VertexBuffer"; - case ObjectEntryTypeNVX::ePushConstant : return "PushConstant"; - default: return "invalid"; - } - } - - enum class ObjectEntryUsageFlagBitsNVX : VkObjectEntryUsageFlagsNVX - { - eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX, - eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX - }; - - VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagBitsNVX value ) - { - switch ( value ) - { - case ObjectEntryUsageFlagBitsNVX::eGraphics : return "Graphics"; - case ObjectEntryUsageFlagBitsNVX::eCompute : return "Compute"; - default: return "invalid"; - } - } - enum class ObjectType { eUnknown = VK_OBJECT_TYPE_UNKNOWN, @@ -5861,14 +6057,15 @@ namespace VULKAN_HPP_NAMESPACE eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, - eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX, - eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX, eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, - eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, + eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR, + eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV, eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, - eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR + eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR, + eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV }; VULKAN_HPP_INLINE std::string to_string( ObjectType value ) @@ -5908,12 +6105,12 @@ namespace VULKAN_HPP_NAMESPACE 
case ObjectType::eDisplayKHR : return "DisplayKHR"; case ObjectType::eDisplayModeKHR : return "DisplayModeKHR"; case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT"; - case ObjectType::eObjectTableNVX : return "ObjectTableNVX"; - case ObjectType::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX"; case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT"; + case ObjectType::eAccelerationStructureKHR : return "AccelerationStructureKHR"; case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT"; - case ObjectType::eAccelerationStructureNV : return "AccelerationStructureNV"; case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL"; + case ObjectType::eDeferredOperationKHR : return "DeferredOperationKHR"; + case ObjectType::eIndirectCommandsLayoutNV : return "IndirectCommandsLayoutNV"; default: return "invalid"; } } @@ -6128,6 +6325,7 @@ namespace VULKAN_HPP_NAMESPACE { eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, + eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV }; @@ -6137,17 +6335,23 @@ namespace VULKAN_HPP_NAMESPACE { case PipelineBindPoint::eGraphics : return "Graphics"; case PipelineBindPoint::eCompute : return "Compute"; - case PipelineBindPoint::eRayTracingNV : return "RayTracingNV"; + case PipelineBindPoint::eRayTracingKHR : return "RayTracingKHR"; default: return "invalid"; } } - enum class PipelineCacheCreateFlagBits - {}; + enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags + { + eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT + }; - VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits ) + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value ) { - return "(void)"; + switch ( value ) + { + case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT : return "ExternallySynchronizedEXT"; + default: return "invalid"; + } } enum class PipelineCacheHeaderVersion @@ -6187,9 +6391,19 @@ namespace VULKAN_HPP_NAMESPACE eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, + eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, + eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, + eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, eDispatchBaseKHR = 
VK_PIPELINE_CREATE_DISPATCH_BASE_KHR }; @@ -6203,9 +6417,19 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits::eDerivative : return "Derivative"; case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex"; case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase"; + case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR : return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR : return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR : return "RayTracingNoNullMissShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR : return "RayTracingNoNullIntersectionShadersKHR"; + case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR : return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR : return "RayTracingSkipAabbsKHR"; case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV"; case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR"; case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits::eIndirectBindableNV : return "IndirectBindableNV"; + case PipelineCreateFlagBits::eLibraryKHR : return "LibraryKHR"; + case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT : return "FailOnPipelineCompileRequiredEXT"; + case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT : return "EarlyReturnOnFailureEXT"; default: return "invalid"; } } @@ -6333,13 +6557,15 @@ namespace VULKAN_HPP_NAMESPACE eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, - eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX, + eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, - eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, - eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, - eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT + eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, + eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, + eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value ) @@ -6365,13 +6591,13 @@ namespace VULKAN_HPP_NAMESPACE case PipelineStageFlagBits::eAllCommands : return "AllCommands"; case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT"; case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; - case PipelineStageFlagBits::eCommandProcessNVX : return "CommandProcessNVX"; + case PipelineStageFlagBits::eRayTracingShaderKHR : return "RayTracingShaderKHR"; + case PipelineStageFlagBits::eAccelerationStructureBuildKHR : return "AccelerationStructureBuildKHR"; case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV"; - case 
PipelineStageFlagBits::eRayTracingShaderNV : return "RayTracingShaderNV"; - case PipelineStageFlagBits::eAccelerationStructureBuildNV : return "AccelerationStructureBuildNV"; case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV"; case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV"; case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT"; + case PipelineStageFlagBits::eCommandPreprocessNV : return "CommandPreprocessNV"; default: return "invalid"; } } @@ -6592,8 +6818,10 @@ namespace VULKAN_HPP_NAMESPACE eTimestamp = VK_QUERY_TYPE_TIMESTAMP, eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, - eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, - ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL + eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, + eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, + ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, + eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV }; VULKAN_HPP_INLINE std::string to_string( QueryType value ) @@ -6605,7 +6833,8 @@ namespace VULKAN_HPP_NAMESPACE case QueryType::eTimestamp : return "Timestamp"; case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT"; case QueryType::ePerformanceQueryKHR : return "PerformanceQueryKHR"; - case QueryType::eAccelerationStructureCompactedSizeNV : return "AccelerationStructureCompactedSizeNV"; + case QueryType::eAccelerationStructureCompactedSizeKHR : return "AccelerationStructureCompactedSizeKHR"; + case QueryType::eAccelerationStructureSerializationSizeKHR : return "AccelerationStructureSerializationSizeKHR"; case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL"; default: return "invalid"; } @@ -6669,20 +6898,21 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class RayTracingShaderGroupTypeNV + enum class RayTracingShaderGroupTypeKHR { - eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV, - eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV, - eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV + eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR }; + using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR; - VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeNV value ) + VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value ) { switch ( value ) { - case RayTracingShaderGroupTypeNV::eGeneral : return "General"; - case RayTracingShaderGroupTypeNV::eTrianglesHitGroup : return "TrianglesHitGroup"; - case RayTracingShaderGroupTypeNV::eProceduralHitGroup : return "ProceduralHitGroup"; + case RayTracingShaderGroupTypeKHR::eGeneral : return "General"; + case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup : return "TrianglesHitGroup"; + case RayTracingShaderGroupTypeKHR::eProceduralHitGroup : return "ProceduralHitGroup"; default: return "invalid"; } } @@ -6756,9 +6986,15 @@ namespace VULKAN_HPP_NAMESPACE eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, 
eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, + eErrorIncompatibleVersionKHR = VK_ERROR_INCOMPATIBLE_VERSION_KHR, eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, + eThreadIdleKHR = VK_THREAD_IDLE_KHR, + eThreadDoneKHR = VK_THREAD_DONE_KHR, + eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR, + eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, + eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT, eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, @@ -6800,9 +7036,15 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR"; case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT"; case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV"; + case Result::eErrorIncompatibleVersionKHR : return "ErrorIncompatibleVersionKHR"; case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT"; case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT"; + case Result::eThreadIdleKHR : return "ThreadIdleKHR"; + case Result::eThreadDoneKHR : return "ThreadDoneKHR"; + case Result::eOperationDeferredKHR : return "OperationDeferredKHR"; + case Result::eOperationNotDeferredKHR : return "OperationNotDeferredKHR"; + case Result::eErrorPipelineCompileRequiredEXT : return "ErrorPipelineCompileRequiredEXT"; default: return "invalid"; } } @@ -7085,14 +7327,20 @@ namespace VULKAN_HPP_NAMESPACE eCompute = VK_SHADER_STAGE_COMPUTE_BIT, eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, eAll = VK_SHADER_STAGE_ALL, + eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR, + eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, + eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV, eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV, eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, - eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, - eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, - eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV + eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value ) @@ -7107,12 +7355,12 @@ namespace VULKAN_HPP_NAMESPACE case ShaderStageFlagBits::eCompute : return "Compute"; case ShaderStageFlagBits::eAllGraphics : return "AllGraphics"; case ShaderStageFlagBits::eAll : return "All"; - case ShaderStageFlagBits::eRaygenNV : return "RaygenNV"; - case ShaderStageFlagBits::eAnyHitNV : return "AnyHitNV"; - case ShaderStageFlagBits::eClosestHitNV : return "ClosestHitNV"; - case ShaderStageFlagBits::eMissNV : return "MissNV"; - case ShaderStageFlagBits::eIntersectionNV : return "IntersectionNV"; - case ShaderStageFlagBits::eCallableNV : return "CallableNV"; + case ShaderStageFlagBits::eRaygenKHR : return "RaygenKHR"; + case 
ShaderStageFlagBits::eAnyHitKHR : return "AnyHitKHR"; + case ShaderStageFlagBits::eClosestHitKHR : return "ClosestHitKHR"; + case ShaderStageFlagBits::eMissKHR : return "MissKHR"; + case ShaderStageFlagBits::eIntersectionKHR : return "IntersectionKHR"; + case ShaderStageFlagBits::eCallableKHR : return "CallableKHR"; case ShaderStageFlagBits::eTaskNV : return "TaskNV"; case ShaderStageFlagBits::eMeshNV : return "MeshNV"; default: return "invalid"; @@ -7476,12 +7724,6 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, - eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX, - eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX, - eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX, - eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX, - eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX, - eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX, ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, @@ -7545,6 +7787,27 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, + eBindAccelerationStructureMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR, + eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR, + eAccelerationStructureCreateGeometryTypeInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR, + eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR, + eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR, + eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR, + eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR, + eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR, + eAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_KHR, + eAccelerationStructureMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR, + eAccelerationStructureVersionKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR, + eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR, + eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR, + 
eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR, + ePhysicalDeviceRayTracingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR, + ePhysicalDeviceRayTracingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR, + eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR, + eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR, + eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR, + eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR, ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV, @@ -7565,8 +7828,6 @@ namespace VULKAN_HPP_NAMESPACE eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, - eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, - eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, @@ -7643,6 +7904,7 @@ namespace VULKAN_HPP_NAMESPACE ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + eDeferredOperationInfoKHR = VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR, ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, @@ -7650,10 +7912,22 @@ namespace VULKAN_HPP_NAMESPACE ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV, + eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV, + eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV, + eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV, + eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV, + eGeneratedCommandsInfoNV = 
VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV, + eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV, ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM, eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM, + ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR, + ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, + ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, + eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, @@ -7736,6 +8010,8 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, + eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, + eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, @@ -7996,12 +8272,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT"; case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT"; case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR"; - case StructureType::eObjectTableCreateInfoNVX : return "ObjectTableCreateInfoNVX"; - case StructureType::eIndirectCommandsLayoutCreateInfoNVX : return "IndirectCommandsLayoutCreateInfoNVX"; - case StructureType::eCmdProcessCommandsInfoNVX : return "CmdProcessCommandsInfoNVX"; - case StructureType::eCmdReserveSpaceForCommandsInfoNVX : return "CmdReserveSpaceForCommandsInfoNVX"; - case StructureType::eDeviceGeneratedCommandsLimitsNVX : return "DeviceGeneratedCommandsLimitsNVX"; - case StructureType::eDeviceGeneratedCommandsFeaturesNVX : return "DeviceGeneratedCommandsFeaturesNVX"; case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV"; case 
StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT"; case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT"; @@ -8065,6 +8335,27 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT"; case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV"; + case StructureType::eBindAccelerationStructureMemoryInfoKHR : return "BindAccelerationStructureMemoryInfoKHR"; + case StructureType::eWriteDescriptorSetAccelerationStructureKHR : return "WriteDescriptorSetAccelerationStructureKHR"; + case StructureType::eAccelerationStructureBuildGeometryInfoKHR : return "AccelerationStructureBuildGeometryInfoKHR"; + case StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR : return "AccelerationStructureCreateGeometryTypeInfoKHR"; + case StructureType::eAccelerationStructureDeviceAddressInfoKHR : return "AccelerationStructureDeviceAddressInfoKHR"; + case StructureType::eAccelerationStructureGeometryAabbsDataKHR : return "AccelerationStructureGeometryAabbsDataKHR"; + case StructureType::eAccelerationStructureGeometryInstancesDataKHR : return "AccelerationStructureGeometryInstancesDataKHR"; + case StructureType::eAccelerationStructureGeometryTrianglesDataKHR : return "AccelerationStructureGeometryTrianglesDataKHR"; + case StructureType::eAccelerationStructureGeometryKHR : return "AccelerationStructureGeometryKHR"; + case StructureType::eAccelerationStructureInfoKHR : return "AccelerationStructureInfoKHR"; + case StructureType::eAccelerationStructureMemoryRequirementsInfoKHR : return "AccelerationStructureMemoryRequirementsInfoKHR"; + case StructureType::eAccelerationStructureVersionKHR : return "AccelerationStructureVersionKHR"; + case StructureType::eCopyAccelerationStructureInfoKHR : return "CopyAccelerationStructureInfoKHR"; + case StructureType::eCopyAccelerationStructureToMemoryInfoKHR : return "CopyAccelerationStructureToMemoryInfoKHR"; + case StructureType::eCopyMemoryToAccelerationStructureInfoKHR : return "CopyMemoryToAccelerationStructureInfoKHR"; + case StructureType::ePhysicalDeviceRayTracingFeaturesKHR : return "PhysicalDeviceRayTracingFeaturesKHR"; + case StructureType::ePhysicalDeviceRayTracingPropertiesKHR : return "PhysicalDeviceRayTracingPropertiesKHR"; + case StructureType::eRayTracingPipelineCreateInfoKHR : return "RayTracingPipelineCreateInfoKHR"; + case StructureType::eRayTracingShaderGroupCreateInfoKHR : return "RayTracingShaderGroupCreateInfoKHR"; + case StructureType::eAccelerationStructureCreateInfoKHR : return "AccelerationStructureCreateInfoKHR"; + case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR : return "RayTracingPipelineInterfaceCreateInfoKHR"; case StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV"; case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV"; case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV"; @@ -8085,8 +8376,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eGeometryNV : return "GeometryNV"; case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV"; case StructureType::eGeometryAabbNV : return "GeometryAabbNV"; - case 
StructureType::eBindAccelerationStructureMemoryInfoNV : return "BindAccelerationStructureMemoryInfoNV"; - case StructureType::eWriteDescriptorSetAccelerationStructureNV : return "WriteDescriptorSetAccelerationStructureNV"; case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV"; case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV"; case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV"; @@ -8163,6 +8452,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT"; case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT"; case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT"; + case StructureType::eDeferredOperationInfoKHR : return "DeferredOperationInfoKHR"; case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR"; case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR"; @@ -8170,10 +8460,22 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR"; case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR"; case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV : return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV"; + case StructureType::eGraphicsShaderGroupCreateInfoNV : return "GraphicsShaderGroupCreateInfoNV"; + case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV : return "GraphicsPipelineShaderGroupsCreateInfoNV"; + case StructureType::eIndirectCommandsLayoutTokenNV : return "IndirectCommandsLayoutTokenNV"; + case StructureType::eIndirectCommandsLayoutCreateInfoNV : return "IndirectCommandsLayoutCreateInfoNV"; + case StructureType::eGeneratedCommandsInfoNV : return "GeneratedCommandsInfoNV"; + case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV : return "GeneratedCommandsMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV : return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV"; case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT : return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT"; case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM : return "CommandBufferInheritanceRenderPassTransformInfoQCOM"; case StructureType::eRenderPassTransformBeginInfoQCOM : return "RenderPassTransformBeginInfoQCOM"; + case StructureType::ePipelineLibraryCreateInfoKHR : return "PipelineLibraryCreateInfoKHR"; + case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT : return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT"; + case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV : return "PhysicalDeviceDiagnosticsConfigFeaturesNV"; + case 
StructureType::eDeviceDiagnosticsConfigCreateInfoNV : return "DeviceDiagnosticsConfigCreateInfoNV"; default: return "invalid"; } } @@ -8447,7 +8749,8 @@ namespace VULKAN_HPP_NAMESPACE { eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, - eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT + eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, + eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT }; VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value ) @@ -8457,6 +8760,7 @@ namespace VULKAN_HPP_NAMESPACE case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted"; case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot"; case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices"; + case ValidationFeatureEnableEXT::eDebugPrintf : return "DebugPrintf"; default: return "invalid"; } } @@ -8536,7 +8840,7 @@ namespace VULKAN_HPP_NAMESPACE { enum : VkFlags { - allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureWriteNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) + allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | 
VkFlags(AccessFlagBits::eAccelerationStructureReadKHR) | VkFlags(AccessFlagBits::eAccelerationStructureWriteKHR) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) | VkFlags(AccessFlagBits::eCommandPreprocessReadNV) | VkFlags(AccessFlagBits::eCommandPreprocessWriteNV) }; }; @@ -8586,13 +8890,13 @@ namespace VULKAN_HPP_NAMESPACE if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | "; if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | "; if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | "; - if ( value & AccessFlagBits::eCommandProcessReadNVX ) result += "CommandProcessReadNVX | "; - if ( value & AccessFlagBits::eCommandProcessWriteNVX ) result += "CommandProcessWriteNVX | "; if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) result += "AccelerationStructureReadKHR | "; + if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) result += "AccelerationStructureWriteKHR | "; if ( value & AccessFlagBits::eShadingRateImageReadNV ) result += "ShadingRateImageReadNV | "; - if ( value & AccessFlagBits::eAccelerationStructureReadNV ) result += "AccelerationStructureReadNV | "; - if ( value & AccessFlagBits::eAccelerationStructureWriteNV ) result += "AccelerationStructureWriteNV | "; if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | "; + if ( value & AccessFlagBits::eCommandPreprocessReadNV ) result += "CommandPreprocessReadNV | "; + if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) result += "CommandPreprocessWriteNV | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } @@ -8708,7 +9012,7 @@ namespace VULKAN_HPP_NAMESPACE { enum : VkFlags { - allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingNV) + allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingKHR) }; }; @@ -8750,7 +9054,7 @@ namespace VULKAN_HPP_NAMESPACE if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += 
"TransformFeedbackBufferEXT | "; if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | "; if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; - if ( value & BufferUsageFlagBits::eRayTracingNV ) result += "RayTracingNV | "; + if ( value & BufferUsageFlagBits::eRayTracingKHR ) result += "RayTracingKHR | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } @@ -8761,46 +9065,48 @@ namespace VULKAN_HPP_NAMESPACE return "{}"; } - using BuildAccelerationStructureFlagsNV = Flags; + using BuildAccelerationStructureFlagsKHR = Flags; - template <> struct FlagTraits + template <> struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsNV::eLowMemory) + allFlags = VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eLowMemory) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator|( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return BuildAccelerationStructureFlagsNV( bit0 ) | bit1; + return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator&( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return BuildAccelerationStructureFlagsNV( bit0 ) & bit1; + return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator^( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return BuildAccelerationStructureFlagsNV( bit0 ) ^ bit1; + return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator~( BuildAccelerationStructureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT { - return ~( BuildAccelerationStructureFlagsNV( bits ) ); + return ~( BuildAccelerationStructureFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsNV value ) + using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; + + VULKAN_HPP_INLINE 
std::string to_string( BuildAccelerationStructureFlagsKHR value )
   {
     if ( !value ) return "{}";
     std::string result;
 
-    if ( value & BuildAccelerationStructureFlagBitsNV::eAllowUpdate ) result += "AllowUpdate | ";
-    if ( value & BuildAccelerationStructureFlagBitsNV::eAllowCompaction ) result += "AllowCompaction | ";
-    if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastTrace ) result += "PreferFastTrace | ";
-    if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastBuild ) result += "PreferFastBuild | ";
-    if ( value & BuildAccelerationStructureFlagBitsNV::eLowMemory ) result += "LowMemory | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) result += "AllowUpdate | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) result += "AllowCompaction | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) result += "PreferFastTrace | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) result += "PreferFastBuild | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) result += "LowMemory | ";
     return "{ " + result.substr(0, result.size() - 3) + " }";
   }
@@ -9504,6 +9810,47 @@ namespace VULKAN_HPP_NAMESPACE
     return "{}";
   }
 
+  using DeviceDiagnosticsConfigFlagsNV = Flags<DeviceDiagnosticsConfigFlagBitsNV, VkDeviceDiagnosticsConfigFlagsNV>;
+
+  template <> struct FlagTraits<DeviceDiagnosticsConfigFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DeviceDiagnosticsConfigFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) result += "EnableShaderDebugInfo | ";
+    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) result += "EnableResourceTracking | ";
+    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) result += "EnableAutomaticCheckpoints | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
   using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR, VkDeviceGroupPresentModeFlagsKHR>;
 
   template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
@@ -10108,7 +10455,7 @@ namespace VULKAN_HPP_NAMESPACE
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | 
VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT) + allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT) }; }; @@ -10161,6 +10508,7 @@ namespace VULKAN_HPP_NAMESPACE if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | "; if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | "; if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | "; + if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | "; if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } @@ -10204,85 +10552,89 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using GeometryFlagsNV 
= Flags; + using GeometryFlagsKHR = Flags; - template <> struct FlagTraits + template <> struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(GeometryFlagBitsNV::eOpaque) | VkFlags(GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation) + allFlags = VkFlags(GeometryFlagBitsKHR::eOpaque) | VkFlags(GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator|( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryFlagsNV( bit0 ) | bit1; + return GeometryFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator&( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryFlagsNV( bit0 ) & bit1; + return GeometryFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator^( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryFlagsNV( bit0 ) ^ bit1; + return GeometryFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator~( GeometryFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT { - return ~( GeometryFlagsNV( bits ) ); + return ~( GeometryFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( GeometryFlagsNV value ) + using GeometryFlagsNV = GeometryFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value ) { if ( !value ) return "{}"; std::string result; - if ( value & GeometryFlagBitsNV::eOpaque ) result += "Opaque | "; - if ( value & GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | "; + if ( value & GeometryFlagBitsKHR::eOpaque ) result += "Opaque | "; + if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using GeometryInstanceFlagsNV = Flags; + using GeometryInstanceFlagsKHR = Flags; - template <> struct FlagTraits + template <> struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(GeometryInstanceFlagBitsNV::eTriangleCullDisable) | VkFlags(GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsNV::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsNV::eForceNoOpaque) + allFlags = VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable) | VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsKHR::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsKHR::eForceNoOpaque) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator|( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryInstanceFlagsNV( bit0 ) | bit1; + return GeometryInstanceFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator&( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryInstanceFlagsNV( bit0 ) & bit1; + return GeometryInstanceFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator^( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryInstanceFlagsNV( bit0 ) ^ bit1; + return GeometryInstanceFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator~( GeometryInstanceFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT { - return ~( GeometryInstanceFlagsNV( bits ) ); + return ~( GeometryInstanceFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsNV value ) + using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value ) { if ( !value ) return "{}"; std::string result; - if ( value & GeometryInstanceFlagBitsNV::eTriangleCullDisable ) result += "TriangleCullDisable | "; - if ( value & GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | "; - if ( value & GeometryInstanceFlagBitsNV::eForceOpaque ) result += "ForceOpaque | "; - if ( value & GeometryInstanceFlagBitsNV::eForceNoOpaque ) result += "ForceNoOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) result += "TriangleFacingCullDisable | "; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) result += "ForceOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) result += "ForceNoOpaque | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } @@ -10524,45 +10876,83 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using IndirectCommandsLayoutUsageFlagsNVX = Flags; + using IndirectCommandsLayoutUsageFlagsNV = Flags; - template <> struct FlagTraits + template <> struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) + allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT 
   {
-    return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
+    return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator&( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) & bit1;
+    return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator^( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) ^ bit1;
+    return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
   {
-    return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
+    return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) );
   }
 
-  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNVX value )
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value )
   {
     if ( !value ) return "{}";
     std::string result;
 
-    if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences ) result += "UnorderedSequences | ";
-    if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences ) result += "SparseSequences | ";
-    if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions ) result += "EmptyExecutions | ";
-    if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences ) result += "IndexedSequences | ";
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) result += "ExplicitPreprocess | ";
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) result += "IndexedSequences | ";
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) result += "UnorderedSequences | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using IndirectStateFlagsNV = Flags<IndirectStateFlagBitsNV, VkIndirectStateFlagsNV>;
+
+  template <> struct FlagTraits<IndirectStateFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(IndirectStateFlagBitsNV::eFlagFrontface)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectStateFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectStateFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectStateFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) 
VULKAN_HPP_NOEXCEPT + { + return ~( IndirectStateFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) result += "FlagFrontface | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } @@ -10751,46 +11141,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_METAL_EXT*/ - using ObjectEntryUsageFlagsNVX = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT - { - return ObjectEntryUsageFlagsNVX( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator&( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT - { - return ObjectEntryUsageFlagsNVX( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator^( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT - { - return ObjectEntryUsageFlagsNVX( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ObjectEntryUsageFlagsNVX( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagsNVX value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & ObjectEntryUsageFlagBitsNVX::eGraphics ) result += "Graphics | "; - if ( value & ObjectEntryUsageFlagBitsNVX::eCompute ) result += "Compute | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - using PeerMemoryFeatureFlags = Flags; template <> struct FlagTraits @@ -10877,9 +11227,41 @@ namespace VULKAN_HPP_NAMESPACE using PipelineCacheCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags ) + template <> struct FlagTraits { - return "{}"; + enum : VkFlags + { + allFlags = VkFlags(PipelineCacheCreateFlagBits::eExternallySynchronizedEXT) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator&( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCacheCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) result += "ExternallySynchronizedEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; } using PipelineColorBlendStateCreateFlags = Flags; @@ -10947,7 +11329,7 @@ namespace 
VULKAN_HPP_NAMESPACE { enum : VkFlags { - allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) + allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipAabbsKHR) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) | VkFlags(PipelineCreateFlagBits::eIndirectBindableNV) | VkFlags(PipelineCreateFlagBits::eLibraryKHR) | VkFlags(PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT) | VkFlags(PipelineCreateFlagBits::eEarlyReturnOnFailureEXT) }; }; @@ -10981,9 +11363,19 @@ namespace VULKAN_HPP_NAMESPACE if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | "; if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | "; if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) result += "RayTracingNoNullClosestHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) result += "RayTracingNoNullMissShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) result += "RayTracingNoNullIntersectionShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) result += "RayTracingSkipTrianglesKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) result += "RayTracingSkipAabbsKHR | "; if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | "; if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | "; if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | "; + if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits::eLibraryKHR ) result += "LibraryKHR | "; + if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) result += "FailOnPipelineCompileRequiredEXT | "; + if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) result += "EarlyReturnOnFailureEXT | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } @@ -11176,7 +11568,7 @@ namespace VULKAN_HPP_NAMESPACE { enum : VkFlags { - allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | 
VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eRayTracingShaderNV) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) + allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eRayTracingShaderKHR) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildKHR) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) | VkFlags(PipelineStageFlagBits::eCommandPreprocessNV) }; }; @@ -11224,13 +11616,13 @@ namespace VULKAN_HPP_NAMESPACE if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | "; if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | "; if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; - if ( value & PipelineStageFlagBits::eCommandProcessNVX ) result += "CommandProcessNVX | "; + if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) result += "RayTracingShaderKHR | "; + if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) result += "AccelerationStructureBuildKHR | "; if ( value & PipelineStageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | "; - if ( value & PipelineStageFlagBits::eRayTracingShaderNV ) result += "RayTracingShaderNV | "; - if ( value & 
PipelineStageFlagBits::eAccelerationStructureBuildNV ) result += "AccelerationStructureBuildNV | "; if ( value & PipelineStageFlagBits::eTaskShaderNV ) result += "TaskShaderNV | "; if ( value & PipelineStageFlagBits::eMeshShaderNV ) result += "MeshShaderNV | "; if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | "; + if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) result += "CommandPreprocessNV | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } @@ -11727,7 +12119,7 @@ namespace VULKAN_HPP_NAMESPACE { enum : VkFlags { - allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenNV) | VkFlags(ShaderStageFlagBits::eAnyHitNV) | VkFlags(ShaderStageFlagBits::eClosestHitNV) | VkFlags(ShaderStageFlagBits::eMissNV) | VkFlags(ShaderStageFlagBits::eIntersectionNV) | VkFlags(ShaderStageFlagBits::eCallableNV) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV) + allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenKHR) | VkFlags(ShaderStageFlagBits::eAnyHitKHR) | VkFlags(ShaderStageFlagBits::eClosestHitKHR) | VkFlags(ShaderStageFlagBits::eMissKHR) | VkFlags(ShaderStageFlagBits::eIntersectionKHR) | VkFlags(ShaderStageFlagBits::eCallableKHR) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV) }; }; @@ -11762,12 +12154,12 @@ namespace VULKAN_HPP_NAMESPACE if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | "; if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | "; if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | "; - if ( value & ShaderStageFlagBits::eRaygenNV ) result += "RaygenNV | "; - if ( value & ShaderStageFlagBits::eAnyHitNV ) result += "AnyHitNV | "; - if ( value & ShaderStageFlagBits::eClosestHitNV ) result += "ClosestHitNV | "; - if ( value & ShaderStageFlagBits::eMissNV ) result += "MissNV | "; - if ( value & ShaderStageFlagBits::eIntersectionNV ) result += "IntersectionNV | "; - if ( value & ShaderStageFlagBits::eCallableNV ) result += "CallableNV | "; + if ( value & ShaderStageFlagBits::eRaygenKHR ) result += "RaygenKHR | "; + if ( value & ShaderStageFlagBits::eAnyHitKHR ) result += "AnyHitKHR | "; + if ( value & ShaderStageFlagBits::eClosestHitKHR ) result += "ClosestHitKHR | "; + if ( value & ShaderStageFlagBits::eMissKHR ) result += "MissKHR | "; + if ( value & ShaderStageFlagBits::eIntersectionKHR ) result += "IntersectionKHR | "; + if ( value & ShaderStageFlagBits::eCallableKHR ) result += "CallableKHR | "; if ( value & ShaderStageFlagBits::eTaskNV ) result += "TaskNV | "; if ( value & ShaderStageFlagBits::eMeshNV ) result += "MeshNV | "; return "{ " + result.substr(0, result.size() - 3) + " }"; @@ -12552,6 +12944,15 @@ namespace VULKAN_HPP_NAMESPACE : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} }; 
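Not part of the patch itself: a minimal sketch of how the two exception types introduced in the hunks just below (IncompatibleVersionKHRError and PipelineCompileRequiredEXTError) are meant to be consumed. It assumes the default vk namespace, VK_HEADER_VERSION >= 135, and an exception-enabled build (VULKAN_HPP_NO_EXCEPTIONS not defined); the explicit throw only simulates what an enhanced-mode command does when the driver returns the corresponding VkResult.

    #include <vulkan/vulkan.hpp>
    #include <cassert>
    #include <iostream>

    int main()
    {
      try
      {
        // Simulates vulkan.hpp's internal throwResultException() path for a command
        // that returned VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT (illustration only).
        throw vk::PipelineCompileRequiredEXTError( "vkCreateGraphicsPipelines" );
      }
      catch ( vk::SystemError const & err )  // every typed error derives from vk::SystemError
      {
        // The typed exception carries the matching vk::Result as its std::error_code.
        assert( err.code() == vk::make_error_code( vk::Result::eErrorPipelineCompileRequiredEXT ) );
        std::cout << "caught: " << err.what() << std::endl;
      }
      return 0;
    }

Catching vk::SystemError (or the concrete type) is the usual pattern; in this header revision an application that sets PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT should expect this error code instead of a synchronous compile.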
+ class IncompatibleVersionKHRError : public SystemError + { + public: + IncompatibleVersionKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {} + IncompatibleVersionKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {} + }; + class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError { public: @@ -12579,6 +12980,15 @@ namespace VULKAN_HPP_NAMESPACE : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} }; + class PipelineCompileRequiredEXTError : public SystemError + { + public: + PipelineCompileRequiredEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorPipelineCompileRequiredEXT ), message ) {} + PipelineCompileRequiredEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorPipelineCompileRequiredEXT ), message ) {} + }; + [[noreturn]] static void throwResultException( Result result, char const * message ) { switch ( result ) @@ -12606,9 +13016,11 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message ); case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message ); + case Result::eErrorIncompatibleVersionKHR: throw IncompatibleVersionKHRError( message ); case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message ); case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message ); + case Result::eErrorPipelineCompileRequiredEXT: throw PipelineCompileRequiredEXTError( message ); default: throw SystemError( make_error_code( result ) ); } } @@ -12740,9 +13152,49 @@ namespace VULKAN_HPP_NAMESPACE } #endif + struct AabbPositionsKHR; + using AabbPositionsNV = AabbPositionsKHR; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureBuildGeometryInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureBuildOffsetInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureCreateGeometryTypeInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureCreateInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct AccelerationStructureCreateInfoNV; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureDeviceAddressInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryAabbsDataKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + union AccelerationStructureGeometryDataKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryInstancesDataKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryTrianglesDataKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct AccelerationStructureInfoNV; + struct AccelerationStructureInstanceKHR; + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct 
AccelerationStructureMemoryRequirementsInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct AccelerationStructureMemoryRequirementsInfoNV; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureVersionKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct AcquireNextImageInfoKHR; struct AcquireProfilingLockInfoKHR; struct AllocationCallbacks; @@ -12772,7 +13224,8 @@ namespace VULKAN_HPP_NAMESPACE struct AttachmentSampleLocationsEXT; struct BaseInStructure; struct BaseOutStructure; - struct BindAccelerationStructureMemoryInfoNV; + struct BindAccelerationStructureMemoryInfoKHR; + using BindAccelerationStructureMemoryInfoNV = BindAccelerationStructureMemoryInfoKHR; struct BindBufferMemoryDeviceGroupInfo; using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; struct BindBufferMemoryInfo; @@ -12784,7 +13237,10 @@ namespace VULKAN_HPP_NAMESPACE struct BindImageMemorySwapchainInfoKHR; struct BindImagePlaneMemoryInfo; using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + struct BindIndexBufferIndirectCommandNV; + struct BindShaderGroupIndirectCommandNV; struct BindSparseInfo; + struct BindVertexBufferIndirectCommandNV; struct BufferCopy; struct BufferCreateInfo; struct BufferDeviceAddressCreateInfoEXT; @@ -12805,8 +13261,6 @@ namespace VULKAN_HPP_NAMESPACE struct ClearDepthStencilValue; struct ClearRect; union ClearValue; - struct CmdProcessCommandsInfoNVX; - struct CmdReserveSpaceForCommandsInfoNVX; struct CoarseSampleLocationNV; struct CoarseSampleOrderCustomNV; struct CommandBufferAllocateInfo; @@ -12821,7 +13275,16 @@ namespace VULKAN_HPP_NAMESPACE struct ConformanceVersion; using ConformanceVersionKHR = ConformanceVersion; struct CooperativeMatrixPropertiesNV; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct CopyAccelerationStructureInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct CopyAccelerationStructureToMemoryInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct CopyDescriptorSet; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct CopyMemoryToAccelerationStructureInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_USE_PLATFORM_WIN32_KHR struct D3D12FenceSubmitInfoKHR; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -12837,6 +13300,9 @@ namespace VULKAN_HPP_NAMESPACE struct DedicatedAllocationBufferCreateInfoNV; struct DedicatedAllocationImageCreateInfoNV; struct DedicatedAllocationMemoryAllocateInfoNV; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct DeferredOperationInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct DescriptorBufferInfo; struct DescriptorImageInfo; struct DescriptorPoolCreateInfo; @@ -12858,9 +13324,8 @@ namespace VULKAN_HPP_NAMESPACE struct DescriptorUpdateTemplateEntry; using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; struct DeviceCreateInfo; + struct DeviceDiagnosticsConfigCreateInfoNV; struct DeviceEventInfoEXT; - struct DeviceGeneratedCommandsFeaturesNVX; - struct DeviceGeneratedCommandsLimitsNVX; struct DeviceGroupBindSparseInfo; using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; struct DeviceGroupCommandBufferBeginInfo; @@ -12877,6 +13342,12 @@ namespace VULKAN_HPP_NAMESPACE struct DeviceMemoryOpaqueCaptureAddressInfo; using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; struct DeviceMemoryOverallocationCreateInfoAMD; +#ifdef VK_ENABLE_BETA_EXTENSIONS + union DeviceOrHostAddressConstKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + union DeviceOrHostAddressKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct 
DeviceQueueCreateInfo; struct DeviceQueueGlobalPriorityCreateInfoEXT; struct DeviceQueueInfo2; @@ -12959,11 +13430,15 @@ namespace VULKAN_HPP_NAMESPACE using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; struct FramebufferCreateInfo; struct FramebufferMixedSamplesCombinationNV; + struct GeneratedCommandsInfoNV; + struct GeneratedCommandsMemoryRequirementsInfoNV; struct GeometryAABBNV; struct GeometryDataNV; struct GeometryNV; struct GeometryTrianglesNV; struct GraphicsPipelineCreateInfo; + struct GraphicsPipelineShaderGroupsCreateInfoNV; + struct GraphicsShaderGroupCreateInfoNV; struct HdrMetadataEXT; struct HeadlessSurfaceCreateInfoEXT; #ifdef VK_USE_PLATFORM_IOS_MVK @@ -13021,9 +13496,9 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR struct ImportSemaphoreWin32HandleInfoKHR; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct IndirectCommandsLayoutCreateInfoNVX; - struct IndirectCommandsLayoutTokenNVX; - struct IndirectCommandsTokenNVX; + struct IndirectCommandsLayoutCreateInfoNV; + struct IndirectCommandsLayoutTokenNV; + struct IndirectCommandsStreamNV; struct InitializePerformanceApiInfoINTEL; struct InputAttachmentAspectReference; using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; @@ -13065,13 +13540,6 @@ namespace VULKAN_HPP_NAMESPACE struct MetalSurfaceCreateInfoEXT; #endif /*VK_USE_PLATFORM_METAL_EXT*/ struct MultisamplePropertiesEXT; - struct ObjectTableCreateInfoNVX; - struct ObjectTableDescriptorSetEntryNVX; - struct ObjectTableEntryNVX; - struct ObjectTableIndexBufferEntryNVX; - struct ObjectTablePipelineEntryNVX; - struct ObjectTablePushConstantEntryNVX; - struct ObjectTableVertexBufferEntryNVX; struct Offset2D; struct Offset3D; struct PastPresentationTimingGOOGLE; @@ -13112,6 +13580,9 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; struct PhysicalDeviceDescriptorIndexingProperties; using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + struct PhysicalDeviceDiagnosticsConfigFeaturesNV; struct PhysicalDeviceDiscardRectanglePropertiesEXT; struct PhysicalDeviceDriverProperties; using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; @@ -13169,6 +13640,7 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDevicePCIBusInfoPropertiesEXT; struct PhysicalDevicePerformanceQueryFeaturesKHR; struct PhysicalDevicePerformanceQueryPropertiesKHR; + struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT; struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; struct PhysicalDevicePointClippingProperties; using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; @@ -13178,6 +13650,12 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceProtectedMemoryFeatures; struct PhysicalDeviceProtectedMemoryProperties; struct PhysicalDevicePushDescriptorPropertiesKHR; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PhysicalDeviceRayTracingFeaturesKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PhysicalDeviceRayTracingPropertiesKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceRayTracingPropertiesNV; struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV; struct PhysicalDeviceSampleLocationsPropertiesEXT; @@ -13261,6 +13739,9 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineInfoKHR; struct 
PipelineInputAssemblyStateCreateInfo; struct PipelineLayoutCreateInfo; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PipelineLibraryCreateInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PipelineMultisampleStateCreateInfo; struct PipelineRasterizationConservativeStateCreateInfoEXT; struct PipelineRasterizationDepthClipStateCreateInfoEXT; @@ -13294,13 +13775,23 @@ namespace VULKAN_HPP_NAMESPACE struct ProtectedSubmitInfo; struct PushConstantRange; struct QueryPoolCreateInfo; - struct QueryPoolCreateInfoINTEL; struct QueryPoolPerformanceCreateInfoKHR; + struct QueryPoolPerformanceQueryCreateInfoINTEL; + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; struct QueueFamilyCheckpointPropertiesNV; struct QueueFamilyProperties; struct QueueFamilyProperties2; using QueueFamilyProperties2KHR = QueueFamilyProperties2; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct RayTracingPipelineCreateInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct RayTracingPipelineCreateInfoNV; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct RayTracingPipelineInterfaceCreateInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct RayTracingShaderGroupCreateInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct RayTracingShaderGroupCreateInfoNV; struct Rect2D; struct RectLayerKHR; @@ -13340,6 +13831,7 @@ namespace VULKAN_HPP_NAMESPACE using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; struct SemaphoreWaitInfo; using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; + struct SetStateFlagsIndirectCommandNV; struct ShaderModuleCreateInfo; struct ShaderModuleValidationCacheCreateInfoEXT; struct ShaderResourceUsageAMD; @@ -13363,6 +13855,9 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_GGP struct StreamDescriptorSurfaceCreateInfoGGP; #endif /*VK_USE_PLATFORM_GGP*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct StridedBufferRegionKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct SubmitInfo; struct SubpassBeginInfo; using SubpassBeginInfoKHR = SubpassBeginInfo; @@ -13399,6 +13894,11 @@ namespace VULKAN_HPP_NAMESPACE struct TextureLODGatherFormatPropertiesAMD; struct TimelineSemaphoreSubmitInfo; using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct TraceRaysIndirectCommandKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct TransformMatrixKHR; + using TransformMatrixNV = TransformMatrixKHR; struct ValidationCacheCreateInfoEXT; struct ValidationFeaturesEXT; struct ValidationFlagsEXT; @@ -13424,7 +13924,8 @@ namespace VULKAN_HPP_NAMESPACE struct Win32SurfaceCreateInfoKHR; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct WriteDescriptorSet; - struct WriteDescriptorSetAccelerationStructureNV; + struct WriteDescriptorSetAccelerationStructureKHR; + using WriteDescriptorSetAccelerationStructureNV = WriteDescriptorSetAccelerationStructureKHR; struct WriteDescriptorSetInlineUniformBlockEXT; struct XYColorEXT; #ifdef VK_USE_PLATFORM_XCB_KHR @@ -14619,84 +15120,85 @@ namespace VULKAN_HPP_NAMESPACE using type = Image; }; - class AccelerationStructureNV + class AccelerationStructureKHR { public: - using CType = VkAccelerationStructureNV; + using CType = VkAccelerationStructureKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eAccelerationStructureNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eAccelerationStructureKHR; public: - VULKAN_HPP_CONSTEXPR AccelerationStructureNV() VULKAN_HPP_NOEXCEPT - : m_accelerationStructureNV(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR 
AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR(VK_NULL_HANDLE) {} - VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_accelerationStructureNV(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR(VK_NULL_HANDLE) {} - VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT - : m_accelerationStructureNV( accelerationStructureNV ) + VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR( accelerationStructureKHR ) {} #if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - AccelerationStructureNV & operator=(VkAccelerationStructureNV accelerationStructureNV) VULKAN_HPP_NOEXCEPT + AccelerationStructureKHR & operator=(VkAccelerationStructureKHR accelerationStructureKHR) VULKAN_HPP_NOEXCEPT { - m_accelerationStructureNV = accelerationStructureNV; + m_accelerationStructureKHR = accelerationStructureKHR; return *this; } #endif - AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - m_accelerationStructureNV = VK_NULL_HANDLE; + m_accelerationStructureKHR = VK_NULL_HANDLE; return *this; } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureNV const& ) const = default; + auto operator<=>( AccelerationStructureKHR const& ) const = default; #else - bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV == rhs.m_accelerationStructureNV; + return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR; } - bool operator!=(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV != rhs.m_accelerationStructureNV; + return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR; } - bool operator<(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV < rhs.m_accelerationStructureNV; + return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR; } #endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV; + return m_accelerationStructureKHR; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV != VK_NULL_HANDLE; + return m_accelerationStructureKHR != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV == VK_NULL_HANDLE; + return m_accelerationStructureKHR == VK_NULL_HANDLE; } private: - VkAccelerationStructureNV m_accelerationStructureNV; + VkAccelerationStructureKHR m_accelerationStructureKHR; }; - static_assert( sizeof( AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" ); + static_assert( sizeof( AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), "handle and wrapper have different size!" 
); template <> - struct cpp_type + struct cpp_type { - using type = AccelerationStructureNV; + using type = AccelerationStructureKHR; }; + using AccelerationStructureNV = AccelerationStructureKHR; class DescriptorUpdateTemplate { @@ -14979,6 +15481,9 @@ namespace VULKAN_HPP_NAMESPACE void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; template + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; template @@ -15002,11 +15507,29 @@ namespace VULKAN_HPP_NAMESPACE void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureKHR( ArrayProxy infos, ArrayProxy pOffsetInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template - void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -15030,8 +15553,26 @@ namespace VULKAN_HPP_NAMESPACE void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ template void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; @@ -15061,6 +15602,15 @@ namespace VULKAN_HPP_NAMESPACE void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; @@ -15179,6 +15729,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; template @@ -15213,10 +15770,10 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - void processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -15237,13 +15794,6 @@ namespace VULKAN_HPP_NAMESPACE void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; template - void reserveSpaceForCommandsNVX( const 
VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; template @@ -15369,6 +15919,24 @@ namespace VULKAN_HPP_NAMESPACE void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy viewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer 
callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; @@ -15387,10 +15955,17 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesKHR( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -15757,6 +16332,87 @@ namespace VULKAN_HPP_NAMESPACE using type = PipelineCache; }; +#ifdef VK_ENABLE_BETA_EXTENSIONS + class DeferredOperationKHR + { + public: + using CType = VkDeferredOperationKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDeferredOperationKHR; + + public: + VULKAN_HPP_CONSTEXPR DeferredOperationKHR() VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( deferredOperationKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DeferredOperationKHR & operator=(VkDeferredOperationKHR deferredOperationKHR) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = deferredOperationKHR; + return *this; + } +#endif + + DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = 
VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeferredOperationKHR const& ) const = default; +#else + bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == rhs.m_deferredOperationKHR; + } + + bool operator!=(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != rhs.m_deferredOperationKHR; + } + + bool operator<(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR < rhs.m_deferredOperationKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == VK_NULL_HANDLE; + } + + private: + VkDeferredOperationKHR m_deferredOperationKHR; + }; + static_assert( sizeof( DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" ); + + template <> + struct cpp_type + { + using type = DeferredOperationKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + class DescriptorPool { public: @@ -15994,162 +16650,83 @@ namespace VULKAN_HPP_NAMESPACE using type = Framebuffer; }; - class IndirectCommandsLayoutNVX + class IndirectCommandsLayoutNV { public: - using CType = VkIndirectCommandsLayoutNVX; + using CType = VkIndirectCommandsLayoutNV; - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eIndirectCommandsLayoutNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eIndirectCommandsLayoutNV; public: - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX() VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV(VK_NULL_HANDLE) {} - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV(VK_NULL_HANDLE) {} - VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX ) VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX ) + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV ) {} #if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT { - m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX; + m_indirectCommandsLayoutNV = indirectCommandsLayoutNV; return *this; } #endif - IndirectCommandsLayoutNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - m_indirectCommandsLayoutNVX = VK_NULL_HANDLE; + m_indirectCommandsLayoutNV = VK_NULL_HANDLE; return *this; } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsLayoutNVX const& ) const = default; + auto operator<=>( IndirectCommandsLayoutNV const& ) const = 
default; #else - bool operator==( IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX; + return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV; } - bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX; + return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV; } - bool operator<(IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX; + return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV; } #endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT { - return m_indirectCommandsLayoutNVX; + return m_indirectCommandsLayoutNV; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE; + return m_indirectCommandsLayoutNV != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE; + return m_indirectCommandsLayoutNV == VK_NULL_HANDLE; } private: - VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX; + VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV; }; - static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" ); + static_assert( sizeof( IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" 
); template <> - struct cpp_type + struct cpp_type { - using type = IndirectCommandsLayoutNVX; - }; - - class ObjectTableNVX - { - public: - using CType = VkObjectTableNVX; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eObjectTableNVX; - - public: - VULKAN_HPP_CONSTEXPR ObjectTableNVX() VULKAN_HPP_NOEXCEPT - : m_objectTableNVX(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR ObjectTableNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_objectTableNVX(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX ) VULKAN_HPP_NOEXCEPT - : m_objectTableNVX( objectTableNVX ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX) VULKAN_HPP_NOEXCEPT - { - m_objectTableNVX = objectTableNVX; - return *this; - } -#endif - - ObjectTableNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_objectTableNVX = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ObjectTableNVX const& ) const = default; -#else - bool operator==( ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX == rhs.m_objectTableNVX; - } - - bool operator!=(ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX != rhs.m_objectTableNVX; - } - - bool operator<(ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX < rhs.m_objectTableNVX; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX == VK_NULL_HANDLE; - } - - private: - VkObjectTableNVX m_objectTableNVX; - }; - static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = ObjectTableNVX; + using type = IndirectCommandsLayoutNV; }; class RenderPass @@ -16692,8 +17269,9 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_NO_SMART_HANDLE class Device; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueAccelerationStructureNV = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueAccelerationStructureKHR = UniqueHandle; + using UniqueAccelerationStructureNV = UniqueHandle; template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; using UniqueBuffer = UniqueHandle; template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; @@ -16702,6 +17280,10 @@ namespace VULKAN_HPP_NAMESPACE using UniqueCommandBuffer = UniqueHandle; template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; using UniqueCommandPool = UniqueHandle; +#ifdef VK_ENABLE_BETA_EXTENSIONS + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueDeferredOperationKHR = UniqueHandle; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; using UniqueDescriptorPool = UniqueHandle; template class UniqueHandleTraits { public: using deleter = PoolFree; }; @@ -16723,10 +17305,8 @@ namespace VULKAN_HPP_NAMESPACE using UniqueImage = UniqueHandle; template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; using UniqueImageView = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueIndirectCommandsLayoutNVX = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueObjectTableNVX = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueIndirectCommandsLayoutNV = UniqueHandle; template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; using UniquePipeline = UniqueHandle; template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; @@ -16885,10 +17465,17 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + Result bindAccelerationStructureMemoryKHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type bindAccelerationStructureMemoryKHR( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - typename ResultValueType::type bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + typename ResultValueType::type bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) 
const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16935,6 +17522,15 @@ namespace VULKAN_HPP_NAMESPACE typename ResultValueType::type bindImageMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result buildAccelerationStructureKHR( ArrayProxy infos, ArrayProxy pOffsetInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; @@ -16943,6 +17539,46 @@ namespace VULKAN_HPP_NAMESPACE typename ResultValueType::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createAccelerationStructureKHR( const 
AccelerationStructureCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17006,6 +17642,19 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createDeferredOperationKHR( Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDeferredOperationKHRUnique( Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17125,24 +17774,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - Result createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - typename ResultValueType::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + typename ResultValueType::type createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #ifndef VULKAN_HPP_NO_SMART_HANDLE template - typename ResultValueType>::type 
createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + typename ResultValueType>::type createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -17179,6 +17817,27 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + ResultValue createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + ResultValue> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator = 
nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17341,18 +18000,35 @@ namespace VULKAN_HPP_NAMESPACE typename ResultValueType::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#else + template + Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template - void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( 
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -17397,6 +18073,22 @@ namespace VULKAN_HPP_NAMESPACE void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17524,31 +18216,17 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void destroyIndirectCommandsLayoutNV( 
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -17776,12 +18454,32 @@ namespace VULKAN_HPP_NAMESPACE void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template - Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - typename ResultValueType::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + typename ResultValueType::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void 
getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ template void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; @@ -17870,6 +18568,21 @@ namespace VULKAN_HPP_NAMESPACE typename ResultValueType::type getCalibratedTimestampsEXT( ArrayProxy timestampInfos, ArrayProxy timestamps, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#else + template + Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17888,6 +18601,15 @@ namespace VULKAN_HPP_NAMESPACE StructureChain getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18000,6 +18722,15 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ template + void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template @@ -18188,6 +18919,22 @@ namespace VULKAN_HPP_NAMESPACE Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18370,13 +19117,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* 
pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VK_USE_PLATFORM_WIN32_KHR #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template @@ -18494,13 +19234,6 @@ namespace VULKAN_HPP_NAMESPACE void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; template - Result unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy objectEntryTypes, ArrayProxy objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; template @@ -18534,6 +19267,15 @@ namespace VULKAN_HPP_NAMESPACE Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type writeAccelerationStructuresPropertiesKHR( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy data, size_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT { return m_device; @@ -18946,13 +19688,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - void getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, 
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template @@ -19687,6 +20422,1135 @@ namespace VULKAN_HPP_NAMESPACE typename ResultValueType::type enumerateInstanceVersion(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + struct AabbPositionsKHR + { + VULKAN_HPP_CONSTEXPR AabbPositionsKHR( float minX_ = {}, + float minY_ = {}, + float minZ_ = {}, + float maxX_ = {}, + float maxY_ = {}, + float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT + : minX( minX_ ) + , minY( minY_ ) + , minZ( minZ_ ) + , maxX( maxX_ ) + , maxY( maxY_ ) + , maxZ( maxZ_ ) + {} + + VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : minX( rhs.minX ) + , minY( rhs.minY ) + , minZ( rhs.minZ ) + , maxX( rhs.maxX ) + , maxY( rhs.maxY ) + , maxZ( rhs.maxZ ) + {} + + AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( AabbPositionsKHR ) ); + return *this; + } + + AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AabbPositionsKHR& operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT + { + minX = minX_; + return *this; + } + + AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT + { + minY = minY_; + return *this; + } + + AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT + { + minZ = minZ_; + return *this; + } + + AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT + { + maxX = maxX_; + return *this; + } + + AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT + { + maxY = maxY_; + return *this; + } + + AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT + { + maxZ = maxZ_; + return *this; + } + + operator VkAabbPositionsKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AabbPositionsKHR const& ) const = default; +#else + bool operator==( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( minX == rhs.minX ) + && ( minY == rhs.minY ) + && ( minZ == rhs.minZ ) + && ( maxX == rhs.maxX ) + && ( maxY == rhs.maxY ) + && ( maxZ == rhs.maxZ ); + } + + bool operator!=( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float minX = {}; + float minY = {}; + float minZ = {}; + float maxX = {}; + float maxY = {}; + float maxZ = {}; + }; + static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
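// Illustrative usage sketch, not generated header code: AabbPositionsKHR can be filled
// either through its constructor or through the chained setters above. Assumes the default
// VULKAN_HPP_NAMESPACE alias "vk"; the unit-box extents and the helper name are made up.
inline vk::AabbPositionsKHR makeUnitBoxAabb()
{
  vk::AabbPositionsKHR box( -1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f );
  // Equivalent formulation using the fluent setters:
  box.setMinX( -1.0f ).setMinY( -1.0f ).setMinZ( -1.0f )
     .setMaxX( 1.0f ).setMaxY( 1.0f ).setMaxZ( 1.0f );
  return box;
}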
); + +#ifdef VK_ENABLE_BETA_EXTENSIONS + union DeviceOrHostAddressConstKHR + { + DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const& rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) ); + } + + DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) + { + deviceAddress = deviceAddress_; + } + + DeviceOrHostAddressConstKHR( const void* hostAddress_ ) + { + hostAddress = hostAddress_; + } + + DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + DeviceOrHostAddressConstKHR & setHostAddress( const void* hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } + + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) ); + return *this; + } + + operator VkDeviceOrHostAddressConstKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceOrHostAddressConstKHR &() + { + return *reinterpret_cast(this); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + const void* hostAddress; +#else + VkDeviceAddress deviceAddress; + const void* hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryTrianglesDataKHR + { + AccelerationStructureGeometryTrianglesDataKHR( VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexFormat( vertexFormat_ ) + , vertexData( vertexData_ ) + , vertexStride( vertexStride_ ) + , indexType( indexType_ ) + , indexData( indexData_ ) + , transformData( transformData_ ) + {} + + AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , vertexFormat( rhs.vertexFormat ) + , vertexData( rhs.vertexData ) + , vertexStride( rhs.vertexStride ) + , indexType( rhs.indexType ) + , indexData( rhs.indexData ) + , transformData( rhs.transformData ) + {} + + AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryTrianglesDataKHR ) - offsetof( AccelerationStructureGeometryTrianglesDataKHR, pNext ) ); + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureGeometryTrianglesDataKHR& operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void* pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ ) VULKAN_HPP_NOEXCEPT + { + transformData = transformData_; + return *this; + } + + operator VkAccelerationStructureGeometryTrianglesDataKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {}; + }; + static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) == sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
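// Illustrative usage sketch (hypothetical helper, assumes VK_ENABLE_BETA_EXTENSIONS and the
// provisional VK_KHR_ray_tracing extension): filling the triangle description of a
// bottom-level geometry. vertexAddress / indexAddress stand for vk::DeviceAddress values
// obtained elsewhere, e.g. from vkGetBufferDeviceAddress; the formats are example choices.
inline vk::AccelerationStructureGeometryTrianglesDataKHR
  makeTriangleData( vk::DeviceAddress vertexAddress, vk::DeviceAddress indexAddress )
{
  return vk::AccelerationStructureGeometryTrianglesDataKHR(
    vk::Format::eR32G32B32Sfloat,                      // vertexFormat: 3 x float positions
    vk::DeviceOrHostAddressConstKHR( vertexAddress ),  // vertexData
    3 * sizeof( float ),                               // vertexStride
    vk::IndexType::eUint32,                            // indexType
    vk::DeviceOrHostAddressConstKHR( indexAddress ),   // indexData
    {} );                                              // transformData: none
}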
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryAabbsDataKHR + { + AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {} ) VULKAN_HPP_NOEXCEPT + : data( data_ ) + , stride( stride_ ) + {} + + AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , data( rhs.data ) + , stride( rhs.stride ) + {} + + AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryAabbsDataKHR ) - offsetof( AccelerationStructureGeometryAabbsDataKHR, pNext ) ); + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureGeometryAabbsDataKHR& operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + operator VkAccelerationStructureGeometryAabbsDataKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + }; + static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
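// Illustrative sketch (hypothetical helper): pointing the AABB geometry description at a
// device buffer holding AabbPositionsKHR records such as the unit box from the first sketch;
// the stride is the distance between consecutive AABBs in that buffer.
inline vk::AccelerationStructureGeometryAabbsDataKHR makeAabbData( vk::DeviceAddress aabbAddress )
{
  return vk::AccelerationStructureGeometryAabbsDataKHR(
    vk::DeviceOrHostAddressConstKHR( aabbAddress ),  // data
    sizeof( vk::AabbPositionsKHR ) );                // stride
}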
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryInstancesDataKHR + { + AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {} ) VULKAN_HPP_NOEXCEPT + : arrayOfPointers( arrayOfPointers_ ) + , data( data_ ) + {} + + AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , arrayOfPointers( rhs.arrayOfPointers ) + , data( rhs.data ) + {} + + AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryInstancesDataKHR ) - offsetof( AccelerationStructureGeometryInstancesDataKHR, pNext ) ); + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureGeometryInstancesDataKHR& operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + { + arrayOfPointers = arrayOfPointers_; + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + operator VkAccelerationStructureGeometryInstancesDataKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + }; + static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) == sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
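// Illustrative sketch (hypothetical helper): describing the instance stream of a top-level
// acceleration structure. instanceAddress is assumed to point at a tightly packed device
// buffer of instance records, so arrayOfPointers stays VK_FALSE.
inline vk::AccelerationStructureGeometryInstancesDataKHR
  makeInstanceData( vk::DeviceAddress instanceAddress )
{
  return vk::AccelerationStructureGeometryInstancesDataKHR(
    VK_FALSE,                                              // arrayOfPointers
    vk::DeviceOrHostAddressConstKHR( instanceAddress ) );  // data
}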
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + union AccelerationStructureGeometryDataKHR + { + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const& rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); + } + + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) + { + triangles = triangles_; + } + + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ ) + { + aabbs = aabbs_; + } + + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) + { + instances = instances_; + } + + AccelerationStructureGeometryDataKHR & setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ ) VULKAN_HPP_NOEXCEPT + { + triangles = triangles_; + return *this; + } + + AccelerationStructureGeometryDataKHR & setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ ) VULKAN_HPP_NOEXCEPT + { + aabbs = aabbs_; + return *this; + } + + AccelerationStructureGeometryDataKHR & setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) VULKAN_HPP_NOEXCEPT + { + instances = instances_; + return *this; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); + return *this; + } + + operator VkAccelerationStructureGeometryDataKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkAccelerationStructureGeometryDataKHR &() + { + return *reinterpret_cast(this); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances; +#else + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryKHR + { + AccelerationStructureGeometryKHR( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT + : geometryType( geometryType_ ) + , geometry( geometry_ ) + , flags( flags_ ) + {} + + AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , geometryType( rhs.geometryType ) + , geometry( rhs.geometry ) + , flags( rhs.flags ) + {} + + AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryKHR ) - offsetof( AccelerationStructureGeometryKHR, pNext ) ); + return *this; + } + + AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT 
+ { + *this = rhs; + } + + AccelerationStructureGeometryKHR& operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureGeometryKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + AccelerationStructureGeometryKHR & setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ ) VULKAN_HPP_NOEXCEPT + { + geometry = geometry_; + return *this; + } + + AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkAccelerationStructureGeometryKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + }; + static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + union DeviceOrHostAddressKHR + { + DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const& rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); + } + + DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) + { + deviceAddress = deviceAddress_; + } + + DeviceOrHostAddressKHR( void* hostAddress_ ) + { + hostAddress = hostAddress_; + } + + DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + DeviceOrHostAddressKHR & setHostAddress( void* hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } + + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); + return *this; + } + + operator VkDeviceOrHostAddressKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceOrHostAddressKHR &() + { + return *reinterpret_cast(this); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + void* hostAddress; +#else + VkDeviceAddress deviceAddress; + void* hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureBuildGeometryInfoKHR + { + AccelerationStructureBuildGeometryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + 
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 update_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const* ppGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , update( update_ ) + , srcAccelerationStructure( srcAccelerationStructure_ ) + , dstAccelerationStructure( dstAccelerationStructure_ ) + , geometryArrayOfPointers( geometryArrayOfPointers_ ) + , geometryCount( geometryCount_ ) + , ppGeometries( ppGeometries_ ) + , scratchData( scratchData_ ) + {} + + AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , type( rhs.type ) + , flags( rhs.flags ) + , update( rhs.update ) + , srcAccelerationStructure( rhs.srcAccelerationStructure ) + , dstAccelerationStructure( rhs.dstAccelerationStructure ) + , geometryArrayOfPointers( rhs.geometryArrayOfPointers ) + , geometryCount( rhs.geometryCount ) + , ppGeometries( rhs.ppGeometries ) + , scratchData( rhs.scratchData ) + {} + + AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureBuildGeometryInfoKHR ) - offsetof( AccelerationStructureBuildGeometryInfoKHR, pNext ) ); + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureBuildGeometryInfoKHR& operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setUpdate( VULKAN_HPP_NAMESPACE::Bool32 update_ ) VULKAN_HPP_NOEXCEPT + { + update = update_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + srcAccelerationStructure = srcAccelerationStructure_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + dstAccelerationStructure = dstAccelerationStructure_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setGeometryArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + { + geometryArrayOfPointers = geometryArrayOfPointers_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) 
VULKAN_HPP_NOEXCEPT + { + geometryCount = geometryCount_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const* ppGeometries_ ) VULKAN_HPP_NOEXCEPT + { + ppGeometries = ppGeometries_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } + + operator VkAccelerationStructureBuildGeometryInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 update = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const* ppGeometries = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; + }; + static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureBuildOffsetInfoKHR + { + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR( uint32_t primitiveCount_ = {}, + uint32_t primitiveOffset_ = {}, + uint32_t firstVertex_ = {}, + uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : primitiveCount( primitiveCount_ ) + , primitiveOffset( primitiveOffset_ ) + , firstVertex( firstVertex_ ) + , transformOffset( transformOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR( AccelerationStructureBuildOffsetInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : primitiveCount( rhs.primitiveCount ) + , primitiveOffset( rhs.primitiveOffset ) + , firstVertex( rhs.firstVertex ) + , transformOffset( rhs.transformOffset ) + {} + + AccelerationStructureBuildOffsetInfoKHR & operator=( AccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( AccelerationStructureBuildOffsetInfoKHR ) ); + return *this; + } + + AccelerationStructureBuildOffsetInfoKHR( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureBuildOffsetInfoKHR& operator=( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureBuildOffsetInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT + { + primitiveCount = primitiveCount_; + return *this; + } + + AccelerationStructureBuildOffsetInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT + { + primitiveOffset = primitiveOffset_; + return *this; + } + + AccelerationStructureBuildOffsetInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + AccelerationStructureBuildOffsetInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT + { + transformOffset = transformOffset_; + return *this; + } + + operator VkAccelerationStructureBuildOffsetInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildOffsetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureBuildOffsetInfoKHR const& ) const = default; +#else + bool operator==( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( primitiveCount == rhs.primitiveCount ) + && ( primitiveOffset == rhs.primitiveOffset ) + && ( firstVertex == rhs.firstVertex ) + && ( transformOffset == rhs.transformOffset ); + } + + bool operator!=( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t primitiveCount = {}; + uint32_t primitiveOffset = {}; + uint32_t firstVertex = {}; + uint32_t transformOffset = {}; + }; + static_assert( sizeof( AccelerationStructureBuildOffsetInfoKHR ) == sizeof( VkAccelerationStructureBuildOffsetInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
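// Editorial note (not part of the generated header): the two provisional structs above,
// guarded by VK_ENABLE_BETA_EXTENSIONS, describe what to build and which slice of the
// geometry to consume. A minimal sketch of the setter-chain style follows; the geometry,
// destination handle, scratch address and primitive count are assumed inputs.
vk::AccelerationStructureGeometryKHR geometry;              // triangles or AABBs, filled elsewhere
const vk::AccelerationStructureGeometryKHR* pGeometry = &geometry;
vk::DeviceOrHostAddressKHR scratch;                         // scratch memory address, filled elsewhere
vk::AccelerationStructureKHR blas;                          // destination handle, created elsewhere
uint32_t triangleCount = 0;                                 // number of primitives in the geometry

auto buildInfo = vk::AccelerationStructureBuildGeometryInfoKHR()
                   .setType( vk::AccelerationStructureTypeKHR::eBottomLevel )
                   .setUpdate( VK_FALSE )
                   .setDstAccelerationStructure( blas )
                   .setGeometryArrayOfPointers( VK_FALSE )
                   .setGeometryCount( 1 )
                   .setPpGeometries( &pGeometry )
                   .setScratchData( scratch );

auto offsetInfo = vk::AccelerationStructureBuildOffsetInfoKHR()
                    .setPrimitiveCount( triangleCount )
                    .setPrimitiveOffset( 0 )
                    .setFirstVertex( 0 )
                    .setTransformOffset( 0 );
// End editorial note.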
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureCreateGeometryTypeInfoKHR + { + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + uint32_t maxPrimitiveCount_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + uint32_t maxVertexCount_ = {}, + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ = {} ) VULKAN_HPP_NOEXCEPT + : geometryType( geometryType_ ) + , maxPrimitiveCount( maxPrimitiveCount_ ) + , indexType( indexType_ ) + , maxVertexCount( maxVertexCount_ ) + , vertexFormat( vertexFormat_ ) + , allowsTransforms( allowsTransforms_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , geometryType( rhs.geometryType ) + , maxPrimitiveCount( rhs.maxPrimitiveCount ) + , indexType( rhs.indexType ) + , maxVertexCount( rhs.maxVertexCount ) + , vertexFormat( rhs.vertexFormat ) + , allowsTransforms( rhs.allowsTransforms ) + {} + + AccelerationStructureCreateGeometryTypeInfoKHR & operator=( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) - offsetof( AccelerationStructureCreateGeometryTypeInfoKHR, pNext ) ); + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureCreateGeometryTypeInfoKHR& operator=( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setMaxPrimitiveCount( uint32_t maxPrimitiveCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPrimitiveCount = maxPrimitiveCount_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setMaxVertexCount( uint32_t maxVertexCount_ ) VULKAN_HPP_NOEXCEPT + { + maxVertexCount = maxVertexCount_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setAllowsTransforms( VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ ) VULKAN_HPP_NOEXCEPT + { + allowsTransforms = allowsTransforms_; + return *this; + } + + operator VkAccelerationStructureCreateGeometryTypeInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateGeometryTypeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureCreateGeometryTypeInfoKHR const& ) const = default; +#else + bool operator==( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( geometryType == rhs.geometryType ) + && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) + && ( indexType == rhs.indexType ) + && ( maxVertexCount == rhs.maxVertexCount ) + && ( vertexFormat == rhs.vertexFormat ) + && ( allowsTransforms == rhs.allowsTransforms ); + } + + bool operator!=( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + uint32_t maxPrimitiveCount = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + uint32_t maxVertexCount = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms = {}; + }; + static_assert( sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) == sizeof( VkAccelerationStructureCreateGeometryTypeInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureCreateInfoKHR + { + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + uint32_t maxGeometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : compactedSize( compactedSize_ ) + , type( type_ ) + , flags( flags_ ) + , maxGeometryCount( maxGeometryCount_ ) + , pGeometryInfos( pGeometryInfos_ ) + , deviceAddress( deviceAddress_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , compactedSize( rhs.compactedSize ) + , type( rhs.type ) + , flags( rhs.flags ) + , maxGeometryCount( rhs.maxGeometryCount ) + , pGeometryInfos( rhs.pGeometryInfos ) + , deviceAddress( rhs.deviceAddress ) + {} + + AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureCreateInfoKHR ) - offsetof( AccelerationStructureCreateInfoKHR, pNext ) ); + return *this; + } + + AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureCreateInfoKHR& operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureCreateInfoKHR & 
setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT + { + compactedSize = compactedSize_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setMaxGeometryCount( uint32_t maxGeometryCount_ ) VULKAN_HPP_NOEXCEPT + { + maxGeometryCount = maxGeometryCount_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setPGeometryInfos( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ ) VULKAN_HPP_NOEXCEPT + { + pGeometryInfos = pGeometryInfos_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + operator VkAccelerationStructureCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureCreateInfoKHR const& ) const = default; +#else + bool operator==( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( compactedSize == rhs.compactedSize ) + && ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( maxGeometryCount == rhs.maxGeometryCount ) + && ( pGeometryInfos == rhs.pGeometryInfos ) + && ( deviceAddress == rhs.deviceAddress ); + } + + bool operator!=( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + uint32_t maxGeometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct GeometryTrianglesNV { VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, @@ -20054,9 +21918,9 @@ namespace VULKAN_HPP_NAMESPACE struct GeometryNV { - VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles, + VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, - VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT : geometryType( geometryType_ ) , geometry( geometry_ ) , flags( flags_ ) @@ -20092,7 +21956,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ ) VULKAN_HPP_NOEXCEPT + GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return *this; @@ -20104,7 +21968,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -20141,16 +22005,16 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {}; - VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; }; static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct AccelerationStructureInfoNV { - VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel, + VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, @@ -20257,7 +22121,7 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {}; VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {}; uint32_t instanceCount = {}; uint32_t geometryCount = {}; @@ -20351,9 +22215,368 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureDeviceAddressInfoKHR + { + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , accelerationStructure( rhs.accelerationStructure ) + {} + + AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureDeviceAddressInfoKHR ) - offsetof( AccelerationStructureDeviceAddressInfoKHR, pNext ) ); + return *this; + } + + AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureDeviceAddressInfoKHR& operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureDeviceAddressInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + operator VkAccelerationStructureDeviceAddressInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const& ) const = default; +#else + bool operator==( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( accelerationStructure == rhs.accelerationStructure ); + } + + bool operator!=( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + }; + static_assert( sizeof( AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct TransformMatrixKHR + { + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array,3> const& matrix_ = {} ) VULKAN_HPP_NOEXCEPT + : matrix{} + { + VULKAN_HPP_NAMESPACE::ConstExpression2DArrayCopy::copy( matrix, matrix_ ); + } + + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : matrix{} + { + VULKAN_HPP_NAMESPACE::ConstExpression2DArrayCopy::copy( matrix, rhs.matrix ); + } + + TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( TransformMatrixKHR ) ); + return *this; + } + + TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + TransformMatrixKHR& operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + TransformMatrixKHR & setMatrix( std::array,3> matrix_ ) VULKAN_HPP_NOEXCEPT + { + memcpy( matrix, matrix_.data(), 3 * 4 * sizeof( float ) ); + return *this; + } + + operator VkTransformMatrixKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( TransformMatrixKHR const& ) const = default; +#else + bool operator==( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( memcmp( matrix, rhs.matrix, 3 * 4 * sizeof( float ) ) == 0 ); + } + + bool operator!=( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float matrix[3][4] = {}; + }; + static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
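// Editorial note (not part of the generated header): TransformMatrixKHR carries a row-major
// 3x4 affine transform. A small sketch of filling an identity matrix through the
// std::array overload of the constructor shown above:
vk::TransformMatrixKHR identity( std::array<std::array<float, 4>, 3>{ {
  { 1.0f, 0.0f, 0.0f, 0.0f },
  { 0.0f, 1.0f, 0.0f, 0.0f },
  { 0.0f, 0.0f, 1.0f, 0.0f }
} } );   // copied into the float matrix[3][4] member
// End editorial note.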
); + + struct AccelerationStructureInstanceKHR + { + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = {}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT + : transform( transform_ ) + , instanceCustomIndex( instanceCustomIndex_ ) + , mask( mask_ ) + , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) + , flags( flags_ ) + , accelerationStructureReference( accelerationStructureReference_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : transform( rhs.transform ) + , instanceCustomIndex( rhs.instanceCustomIndex ) + , mask( rhs.mask ) + , instanceShaderBindingTableRecordOffset( rhs.instanceShaderBindingTableRecordOffset ) + , flags( rhs.flags ) + , accelerationStructureReference( rhs.accelerationStructureReference ) + {} + + AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( AccelerationStructureInstanceKHR ) ); + return *this; + } + + AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureInstanceKHR& operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast(&flags_); + return *this; + } + + AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } + + operator VkAccelerationStructureInstanceKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureInstanceKHR const& ) const = default; +#else + bool operator==( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( transform == rhs.transform ) + && ( instanceCustomIndex == rhs.instanceCustomIndex ) + && ( mask == rhs.mask ) + && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) + && ( flags == rhs.flags ) + && ( 
accelerationStructureReference == rhs.accelerationStructureReference ); + } + + bool operator!=( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + }; + static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureMemoryRequirementsInfoKHR + { + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , buildType( buildType_ ) + , accelerationStructure( accelerationStructure_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , type( rhs.type ) + , buildType( rhs.buildType ) + , accelerationStructure( rhs.accelerationStructure ) + {} + + AccelerationStructureMemoryRequirementsInfoKHR & operator=( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) - offsetof( AccelerationStructureMemoryRequirementsInfoKHR, pNext ) ); + return *this; + } + + AccelerationStructureMemoryRequirementsInfoKHR( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureMemoryRequirementsInfoKHR& operator=( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureMemoryRequirementsInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureMemoryRequirementsInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + AccelerationStructureMemoryRequirementsInfoKHR & setBuildType( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ ) VULKAN_HPP_NOEXCEPT + { + buildType = buildType_; + return *this; + } + + AccelerationStructureMemoryRequirementsInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + operator VkAccelerationStructureMemoryRequirementsInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMemoryRequirementsInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
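// Editorial note (not part of the generated header): the instance record above mirrors the
// C layout exactly: instanceCustomIndex/mask and instanceShaderBindingTableRecordOffset/flags
// share two 32-bit bitfields (24 + 8 bits each). A sketch of filling one instance; the
// transform and the 64-bit acceleration-structure reference are assumed inputs.
vk::TransformMatrixKHR transform;        // filled elsewhere
uint64_t blasReference = 0;              // device address or handle of the referenced BLAS
vk::AccelerationStructureInstanceKHR instance( transform );
instance.setInstanceCustomIndex( 42 )    // stored in the low 24 bits
        .setMask( 0xFF )                 // visible to all ray masks (8 bits)
        .setInstanceShaderBindingTableRecordOffset( 0 )
        .setFlags( vk::GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
        .setAccelerationStructureReference( blasReference );
// End editorial note.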
AccelerationStructureMemoryRequirementsInfoKHR const& ) const = default; +#else + bool operator==( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( type == rhs.type ) + && ( buildType == rhs.buildType ) + && ( accelerationStructure == rhs.accelerationStructure ); + } + + bool operator!=( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject; + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + }; + static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct AccelerationStructureMemoryRequirementsInfoNV { - VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , accelerationStructure( accelerationStructure_ ) @@ -20430,12 +22653,88 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject; + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; }; static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureVersionKHR + { + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR( const uint8_t* versionData_ = {} ) VULKAN_HPP_NOEXCEPT + : versionData( versionData_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR( AccelerationStructureVersionKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , versionData( rhs.versionData ) + {} + + AccelerationStructureVersionKHR & operator=( AccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureVersionKHR ) - offsetof( AccelerationStructureVersionKHR, pNext ) ); + return *this; + } + + AccelerationStructureVersionKHR( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AccelerationStructureVersionKHR& operator=( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AccelerationStructureVersionKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureVersionKHR & setVersionData( const uint8_t* versionData_ ) VULKAN_HPP_NOEXCEPT + { + versionData = versionData_; + return *this; + } + + operator VkAccelerationStructureVersionKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureVersionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureVersionKHR const& ) const = default; +#else + bool operator==( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( versionData == rhs.versionData ); + } + + bool operator!=( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionKHR; + const void* pNext = {}; + const uint8_t* versionData = {}; + }; + static_assert( sizeof( AccelerationStructureVersionKHR ) == sizeof( VkAccelerationStructureVersionKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct AcquireNextImageInfoKHR { VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, @@ -22389,13 +24688,13 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
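// Editorial note (not part of the generated header): versionData points at the version blob
// taken from a serialized acceleration structure and is used to ask a device whether that
// serialized data is compatible with it. The 2 * VK_UUID_SIZE size used below follows the
// provisional specification and should be treated as an assumption of this note.
uint8_t versionBytes[2 * VK_UUID_SIZE] = {};                 // copied from serialized data elsewhere
vk::AccelerationStructureVersionKHR versionInfo( versionBytes );
// End editorial note.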
); - struct BindAccelerationStructureMemoryInfoNV + struct BindAccelerationStructureMemoryInfoKHR { - VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, - uint32_t deviceIndexCount_ = {}, - const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + uint32_t deviceIndexCount_ = {}, + const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT : accelerationStructure( accelerationStructure_ ) , memory( memory_ ) , memoryOffset( memoryOffset_ ) @@ -22403,7 +24702,7 @@ namespace VULKAN_HPP_NAMESPACE , pDeviceIndices( pDeviceIndices_ ) {} - VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR( BindAccelerationStructureMemoryInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT : pNext( rhs.pNext ) , accelerationStructure( rhs.accelerationStructure ) , memory( rhs.memory ) @@ -22412,73 +24711,73 @@ namespace VULKAN_HPP_NAMESPACE , pDeviceIndices( rhs.pDeviceIndices ) {} - BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & operator=( BindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( BindAccelerationStructureMemoryInfoNV ) - offsetof( BindAccelerationStructureMemoryInfoNV, pNext ) ); + memcpy( &pNext, &rhs.pNext, sizeof( BindAccelerationStructureMemoryInfoKHR ) - offsetof( BindAccelerationStructureMemoryInfoKHR, pNext ) ); return *this; } - BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR& operator=( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast(&rhs); return *this; } - BindAccelerationStructureMemoryInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } - BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } - BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize 
memoryOffset_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } - BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return *this; } - BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } - operator VkBindAccelerationStructureMemoryInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkBindAccelerationStructureMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkBindAccelerationStructureMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindAccelerationStructureMemoryInfoNV const& ) const = default; + auto operator<=>( BindAccelerationStructureMemoryInfoKHR const& ) const = default; #else - bool operator==( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) @@ -22489,23 +24788,23 @@ namespace VULKAN_HPP_NAMESPACE && ( pDeviceIndices == rhs.pDeviceIndices ); } - bool operator!=( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoKHR; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; uint32_t deviceIndexCount = {}; const uint32_t* pDeviceIndices = {}; }; - static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindAccelerationStructureMemoryInfoKHR ) == sizeof( VkBindAccelerationStructureMemoryInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct BindBufferMemoryDeviceGroupInfo { @@ -23198,6 +25497,154 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
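// Editorial note (not part of the generated header): this wrapper now carries the KHR suffix;
// filling it is unchanged from the NV-suffixed version it replaces. A sketch with assumed handles:
vk::AccelerationStructureKHR accelerationStructure;   // created elsewhere
vk::DeviceMemory memory;                              // allocated elsewhere
vk::BindAccelerationStructureMemoryInfoKHR bindInfo;
bindInfo.setAccelerationStructure( accelerationStructure )
        .setMemory( memory )
        .setMemoryOffset( 0 );
// The bind call itself takes one such info struct per acceleration structure and is issued on
// the device through the KHR bind entry point; its wrapper signature is not part of this hunk.
// End editorial note.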
); + struct BindIndexBufferIndirectCommandNV + { + VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT + : bufferAddress( bufferAddress_ ) + , size( size_ ) + , indexType( indexType_ ) + {} + + VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const& rhs ) VULKAN_HPP_NOEXCEPT + : bufferAddress( rhs.bufferAddress ) + , size( rhs.size ) + , indexType( rhs.indexType ) + {} + + BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( BindIndexBufferIndirectCommandNV ) ); + return *this; + } + + BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BindIndexBufferIndirectCommandNV& operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + operator VkBindIndexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindIndexBufferIndirectCommandNV const& ) const = default; +#else + bool operator==( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( bufferAddress == rhs.bufferAddress ) + && ( size == rhs.size ) + && ( indexType == rhs.indexType ); + } + + bool operator!=( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + }; + static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct BindShaderGroupIndirectCommandNV
+  {
+    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : groupIndex( groupIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : groupIndex( rhs.groupIndex )
+    {}
+
+    BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( BindShaderGroupIndirectCommandNV ) );
+      return *this;
+    }
+
+    BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindShaderGroupIndirectCommandNV& operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>(&rhs);
+      return *this;
+    }
+
+    BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupIndex = groupIndex_;
+      return *this;
+    }
+
+    operator VkBindShaderGroupIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV*>( this );
+    }
+
+    operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindShaderGroupIndirectCommandNV const& ) const = default;
+#else
+    bool operator==( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( groupIndex == rhs.groupIndex );
+    }
+
+    bool operator!=( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t groupIndex = {};
+  };
+  static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
   struct SparseMemoryBind
   {
     VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {},
@@ -24120,6 +26567,91 @@ namespace VULKAN_HPP_NAMESPACE
   static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<BindSparseInfo>::value, "struct wrapper is not a standard layout!"
); + struct BindVertexBufferIndirectCommandNV + { + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress( bufferAddress_ ) + , size( size_ ) + , stride( stride_ ) + {} + + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const& rhs ) VULKAN_HPP_NOEXCEPT + : bufferAddress( rhs.bufferAddress ) + , size( rhs.size ) + , stride( rhs.stride ) + {} + + BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( BindVertexBufferIndirectCommandNV ) ); + return *this; + } + + BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BindVertexBufferIndirectCommandNV& operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + operator VkBindVertexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindVertexBufferIndirectCommandNV const& ) const = default; +#else + bool operator==( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( bufferAddress == rhs.bufferAddress ) + && ( size == rhs.size ) + && ( stride == rhs.stride ); + } + + bool operator!=( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + uint32_t stride = {}; + }; + static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + struct BufferCopy { VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, @@ -25586,360 +28118,6 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
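// Editorial note (not part of the generated header): the three Bind*IndirectCommandNV structs
// added above are plain, sType-less records meant to be written into the token stream consumed
// by VK_NV_device_generated_commands. The packed layout below is an illustration only; the
// actual per-sequence layout is whatever the application encodes in its VkIndirectCommandsLayoutNV.
struct GeneratedDrawRecord
{
  VkBindShaderGroupIndirectCommandNV  shaderGroup;    // which pipeline shader group to bind
  VkBindIndexBufferIndirectCommandNV  indexBuffer;    // bufferAddress / size / indexType
  VkBindVertexBufferIndirectCommandNV vertexBuffer;   // bufferAddress / size / stride
  VkDrawIndexedIndirectCommand        draw;           // standard indexed-draw parameters
};
// End editorial note.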
); - struct IndirectCommandsTokenNVX - { - VULKAN_HPP_CONSTEXPR IndirectCommandsTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT - : tokenType( tokenType_ ) - , buffer( buffer_ ) - , offset( offset_ ) - {} - - VULKAN_HPP_CONSTEXPR IndirectCommandsTokenNVX( IndirectCommandsTokenNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : tokenType( rhs.tokenType ) - , buffer( rhs.buffer ) - , offset( rhs.offset ) - {} - - IndirectCommandsTokenNVX & operator=( IndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( static_cast(this), &rhs, sizeof( IndirectCommandsTokenNVX ) ); - return *this; - } - - IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - IndirectCommandsTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT - { - tokenType = tokenType_; - return *this; - } - - IndirectCommandsTokenNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } - - IndirectCommandsTokenNVX & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT - { - offset = offset_; - return *this; - } - - operator VkIndirectCommandsTokenNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectCommandsTokenNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsTokenNVX const& ) const = default; -#else - bool operator==( IndirectCommandsTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( tokenType == rhs.tokenType ) - && ( buffer == rhs.buffer ) - && ( offset == rhs.offset ); - } - - bool operator!=( IndirectCommandsTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - }; - static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct CmdProcessCommandsInfoNVX - { - VULKAN_HPP_CONSTEXPR CmdProcessCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = {}, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = {}, - uint32_t indirectCommandsTokenCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = {}, - uint32_t maxSequencesCount_ = {}, - VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT - : objectTable( objectTable_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , indirectCommandsTokenCount( indirectCommandsTokenCount_ ) - , pIndirectCommandsTokens( pIndirectCommandsTokens_ ) - , maxSequencesCount( maxSequencesCount_ ) - , targetCommandBuffer( targetCommandBuffer_ ) - , sequencesCountBuffer( sequencesCountBuffer_ ) - , sequencesCountOffset( sequencesCountOffset_ ) - , sequencesIndexBuffer( sequencesIndexBuffer_ ) - , sequencesIndexOffset( sequencesIndexOffset_ ) - {} - - VULKAN_HPP_CONSTEXPR CmdProcessCommandsInfoNVX( CmdProcessCommandsInfoNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : pNext( rhs.pNext ) - , objectTable( rhs.objectTable ) - , indirectCommandsLayout( rhs.indirectCommandsLayout ) - , indirectCommandsTokenCount( rhs.indirectCommandsTokenCount ) - , pIndirectCommandsTokens( rhs.pIndirectCommandsTokens ) - , maxSequencesCount( rhs.maxSequencesCount ) - , targetCommandBuffer( rhs.targetCommandBuffer ) - , sequencesCountBuffer( rhs.sequencesCountBuffer ) - , sequencesCountOffset( rhs.sequencesCountOffset ) - , sequencesIndexBuffer( rhs.sequencesIndexBuffer ) - , sequencesIndexOffset( rhs.sequencesIndexOffset ) - {} - - CmdProcessCommandsInfoNVX & operator=( CmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( CmdProcessCommandsInfoNVX ) - offsetof( CmdProcessCommandsInfoNVX, pNext ) ); - return *this; - } - - CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - CmdProcessCommandsInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - CmdProcessCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT - { - objectTable = objectTable_; - return *this; - } - - CmdProcessCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT - { - indirectCommandsLayout = indirectCommandsLayout_; - return *this; - } - - CmdProcessCommandsInfoNVX & setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ ) VULKAN_HPP_NOEXCEPT - { - indirectCommandsTokenCount = indirectCommandsTokenCount_; - return *this; - } - - CmdProcessCommandsInfoNVX & setPIndirectCommandsTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ ) VULKAN_HPP_NOEXCEPT - { - pIndirectCommandsTokens = pIndirectCommandsTokens_; - return *this; - } - - CmdProcessCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT - { - maxSequencesCount = maxSequencesCount_; - return *this; 
- } - - CmdProcessCommandsInfoNVX & setTargetCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ ) VULKAN_HPP_NOEXCEPT - { - targetCommandBuffer = targetCommandBuffer_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT - { - sequencesCountBuffer = sequencesCountBuffer_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT - { - sequencesCountOffset = sequencesCountOffset_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT - { - sequencesIndexBuffer = sequencesIndexBuffer_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT - { - sequencesIndexOffset = sequencesIndexOffset_; - return *this; - } - - operator VkCmdProcessCommandsInfoNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkCmdProcessCommandsInfoNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CmdProcessCommandsInfoNVX const& ) const = default; -#else - bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectTable == rhs.objectTable ) - && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) - && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount ) - && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens ) - && ( maxSequencesCount == rhs.maxSequencesCount ) - && ( targetCommandBuffer == rhs.targetCommandBuffer ) - && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) - && ( sequencesCountOffset == rhs.sequencesCountOffset ) - && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) - && ( sequencesIndexOffset == rhs.sequencesIndexOffset ); - } - - bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdProcessCommandsInfoNVX; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout = {}; - uint32_t indirectCommandsTokenCount = {}; - const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens = {}; - uint32_t maxSequencesCount = {}; - VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer = {}; - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; - }; - static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct CmdReserveSpaceForCommandsInfoNVX - { - VULKAN_HPP_CONSTEXPR CmdReserveSpaceForCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = {}, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = {}, - uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT - : objectTable( objectTable_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , maxSequencesCount( maxSequencesCount_ ) - {} - - VULKAN_HPP_CONSTEXPR CmdReserveSpaceForCommandsInfoNVX( CmdReserveSpaceForCommandsInfoNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : pNext( rhs.pNext ) - , objectTable( rhs.objectTable ) - , indirectCommandsLayout( rhs.indirectCommandsLayout ) - , maxSequencesCount( rhs.maxSequencesCount ) - {} - - CmdReserveSpaceForCommandsInfoNVX & operator=( CmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( CmdReserveSpaceForCommandsInfoNVX ) - offsetof( CmdReserveSpaceForCommandsInfoNVX, pNext ) ); - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT - { - objectTable = objectTable_; - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT - { - indirectCommandsLayout = indirectCommandsLayout_; - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT - { - maxSequencesCount = maxSequencesCount_; - return *this; - } - - operator VkCmdReserveSpaceForCommandsInfoNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkCmdReserveSpaceForCommandsInfoNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CmdReserveSpaceForCommandsInfoNVX const& ) const = default; -#else - bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectTable == rhs.objectTable ) - && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) - && ( maxSequencesCount == rhs.maxSequencesCount ); - } - - bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout = {}; - uint32_t maxSequencesCount = {}; - }; - static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct CoarseSampleLocationNV { VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, @@ -27435,6 +29613,184 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct CopyAccelerationStructureInfoKHR + { + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , src( rhs.src ) + , dst( rhs.dst ) + , mode( rhs.mode ) + {} + + CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( CopyAccelerationStructureInfoKHR ) - offsetof( CopyAccelerationStructureInfoKHR, pNext ) ); + return *this; + } + + CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + CopyAccelerationStructureInfoKHR& operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + CopyAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkCopyAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CopyAccelerationStructureInfoKHR const& ) const = default; +#else + bool operator==( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( src == rhs.src ) + && ( dst == rhs.dst ) + && ( mode == rhs.mode ); + } + + bool operator!=( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), "struct and wrapper have 
different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct CopyAccelerationStructureToMemoryInfoKHR + { + CopyAccelerationStructureToMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , src( rhs.src ) + , dst( rhs.dst ) + , mode( rhs.mode ) + {} + + CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( CopyAccelerationStructureToMemoryInfoKHR ) - offsetof( CopyAccelerationStructureToMemoryInfoKHR, pNext ) ); + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + CopyAccelerationStructureToMemoryInfoKHR& operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct CopyDescriptorSet { VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, @@ -27575,6 +29931,86 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct CopyMemoryToAccelerationStructureInfoKHR + { + CopyMemoryToAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , src( rhs.src ) + , dst( rhs.dst ) + , mode( rhs.mode ) + {} + + CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( CopyMemoryToAccelerationStructureInfoKHR ) - offsetof( CopyMemoryToAccelerationStructureInfoKHR, pNext ) ); + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + CopyMemoryToAccelerationStructureInfoKHR& operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkCopyMemoryToAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_USE_PLATFORM_WIN32_KHR struct D3D12FenceSubmitInfoKHR { @@ -28910,6 +31346,82 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
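
As an illustrative aside, not taken from the generated patch itself: the three beta copy-info wrappers added above are plain fill-in structures that can be populated either through their constructors or the chained setters defined here. The sketch below assumes the default vk namespace, that VK_ENABLE_BETA_EXTENSIONS is defined before the header is included, that src/dst are valid acceleration-structure handles, and that the eSerialize mode exists alongside the eClone default in the provisional enum.

    #include <vulkan/vulkan.hpp>  // VK_ENABLE_BETA_EXTENSIONS must be defined before this include

    #ifdef VK_ENABLE_BETA_EXTENSIONS
    // Sketch only: fills the copy-info structures consumed by the provisional copy commands.
    void fillCopyInfos( vk::AccelerationStructureKHR src,
                        vk::AccelerationStructureKHR dst,
                        vk::DeviceOrHostAddressKHR   serializedData )
    {
      // structure-to-structure copy; eClone is also the constructor default
      vk::CopyAccelerationStructureInfoKHR copyInfo( src, dst, vk::CopyAccelerationStructureModeKHR::eClone );

      // structure-to-memory copy (e.g. serialization), built with the chained setters instead
      vk::CopyAccelerationStructureToMemoryInfoKHR toMemoryInfo =
        vk::CopyAccelerationStructureToMemoryInfoKHR()
          .setSrc( src )
          .setDst( serializedData )
          .setMode( vk::CopyAccelerationStructureModeKHR::eSerialize );  // assumed provisional enum value

      (void)copyInfo;      // would be handed to the new vkCmdCopyAccelerationStructureKHR
      (void)toMemoryInfo;  // would be handed to the new vkCmdCopyAccelerationStructureToMemoryKHR
    }
    #endif
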
); +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct DeferredOperationInfoKHR + { + VULKAN_HPP_CONSTEXPR DeferredOperationInfoKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ = {} ) VULKAN_HPP_NOEXCEPT + : operationHandle( operationHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR DeferredOperationInfoKHR( DeferredOperationInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , operationHandle( rhs.operationHandle ) + {} + + DeferredOperationInfoKHR & operator=( DeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DeferredOperationInfoKHR ) - offsetof( DeferredOperationInfoKHR, pNext ) ); + return *this; + } + + DeferredOperationInfoKHR( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DeferredOperationInfoKHR& operator=( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + DeferredOperationInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeferredOperationInfoKHR & setOperationHandle( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ ) VULKAN_HPP_NOEXCEPT + { + operationHandle = operationHandle_; + return *this; + } + + operator VkDeferredOperationInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeferredOperationInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeferredOperationInfoKHR const& ) const = default; +#else + bool operator==( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( operationHandle == rhs.operationHandle ); + } + + bool operator!=( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeferredOperationInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle = {}; + }; + static_assert( sizeof( DeferredOperationInfoKHR ) == sizeof( VkDeferredOperationInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct DescriptorBufferInfo { VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, @@ -31112,271 +33624,153 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
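
A further brief sketch, also not from the patch: DeferredOperationInfoKHR above only carries a DeferredOperationKHR handle and is intended to be chained through pNext to request deferral of a supporting command. The handle is assumed to have been created elsewhere through the deferred-host-operations entry points added by the same provisional extension.

    #include <vulkan/vulkan.hpp>  // with VK_ENABLE_BETA_EXTENSIONS defined beforehand

    #ifdef VK_ENABLE_BETA_EXTENSIONS
    // Sketch only: `deferredOp` is assumed to be a valid handle, e.g. obtained via vkCreateDeferredOperationKHR.
    vk::DeferredOperationInfoKHR makeDeferredOperationInfo( vk::DeferredOperationKHR deferredOp )
    {
      return vk::DeferredOperationInfoKHR( deferredOp );
    }
    #endif
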
); - struct DeviceEventInfoEXT + struct DeviceDiagnosticsConfigCreateInfoNV { - VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT - : deviceEvent( deviceEvent_ ) - {} - - VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT - : pNext( rhs.pNext ) - , deviceEvent( rhs.deviceEvent ) - {} - - DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceEventInfoEXT ) - offsetof( DeviceEventInfoEXT, pNext ) ); - return *this; - } - - DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT - { - deviceEvent = deviceEvent_; - return *this; - } - - operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceEventInfoEXT const& ) const = default; -#else - bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceEvent == rhs.deviceEvent ); - } - - bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug; - }; - static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DeviceGeneratedCommandsFeaturesNVX - { - VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ = {} ) VULKAN_HPP_NOEXCEPT - : computeBindingPointSupport( computeBindingPointSupport_ ) + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) {} - VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( DeviceGeneratedCommandsFeaturesNVX const& rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT : pNext( rhs.pNext ) - , computeBindingPointSupport( rhs.computeBindingPointSupport ) + , flags( rhs.flags ) {} - DeviceGeneratedCommandsFeaturesNVX & operator=( DeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGeneratedCommandsFeaturesNVX ) - offsetof( DeviceGeneratedCommandsFeaturesNVX, pNext ) ); + memcpy( &pNext, &rhs.pNext, sizeof( DeviceDiagnosticsConfigCreateInfoNV ) - offsetof( DeviceDiagnosticsConfigCreateInfoNV, pNext ) ); return *this; } - DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceDiagnosticsConfigCreateInfoNV& operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast(&rhs); return *this; } - DeviceGeneratedCommandsFeaturesNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceGeneratedCommandsFeaturesNVX & setComputeBindingPointSupport( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ ) VULKAN_HPP_NOEXCEPT + DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { - computeBindingPointSupport = computeBindingPointSupport_; + flags = flags_; return *this; } - operator VkDeviceGeneratedCommandsFeaturesNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceDiagnosticsConfigCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceGeneratedCommandsFeaturesNVX &() VULKAN_HPP_NOEXCEPT + operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGeneratedCommandsFeaturesNVX const& ) const = default; + auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const& ) const = default; #else - bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( computeBindingPointSupport == rhs.computeBindingPointSupport ); + && ( flags == rhs.flags ); } - bool operator!=( 
DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport = {}; + VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {}; }; - static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct DeviceGeneratedCommandsLimitsNVX + struct DeviceEventInfoEXT { - VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = {}, - uint32_t maxObjectEntryCounts_ = {}, - uint32_t minSequenceCountBufferOffsetAlignment_ = {}, - uint32_t minSequenceIndexBufferOffsetAlignment_ = {}, - uint32_t minCommandsTokenBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT - : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ ) - , maxObjectEntryCounts( maxObjectEntryCounts_ ) - , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ ) - , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ ) - , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ ) + VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT + : deviceEvent( deviceEvent_ ) {} - VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( DeviceGeneratedCommandsLimitsNVX const& rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT : pNext( rhs.pNext ) - , maxIndirectCommandsLayoutTokenCount( rhs.maxIndirectCommandsLayoutTokenCount ) - , maxObjectEntryCounts( rhs.maxObjectEntryCounts ) - , minSequenceCountBufferOffsetAlignment( rhs.minSequenceCountBufferOffsetAlignment ) - , minSequenceIndexBufferOffsetAlignment( rhs.minSequenceIndexBufferOffsetAlignment ) - , minCommandsTokenBufferOffsetAlignment( rhs.minCommandsTokenBufferOffsetAlignment ) + , deviceEvent( rhs.deviceEvent ) {} - DeviceGeneratedCommandsLimitsNVX & operator=( DeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGeneratedCommandsLimitsNVX ) - offsetof( DeviceGeneratedCommandsLimitsNVX, pNext ) ); + memcpy( &pNext, &rhs.pNext, sizeof( DeviceEventInfoEXT ) - offsetof( DeviceEventInfoEXT, pNext ) ); return *this; } - DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT + 
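
A short usage sketch for the new DeviceDiagnosticsConfigCreateInfoNV above, not part of the patch: like other device-creation extension structures it is chained into DeviceCreateInfo::pNext when the device is created; the flag bit names below are assumed from VK_NV_device_diagnostics_config, which must also be enabled on the device.

    #include <vulkan/vulkan.hpp>

    // Sketch only: queue and extension setup on `deviceCreateInfo` is assumed to happen elsewhere.
    void chainDiagnosticsConfig( vk::DeviceCreateInfo &                        deviceCreateInfo,
                                 vk::DeviceDiagnosticsConfigCreateInfoNV &     diagnosticsConfig )
    {
      diagnosticsConfig.setFlags( vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking        // assumed flag bits
                                  | vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints );
      deviceCreateInfo.setPNext( &diagnosticsConfig );  // chained like other device-creation extension structs
    }
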
DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast(&rhs); return *this; } - DeviceGeneratedCommandsLimitsNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceGeneratedCommandsLimitsNVX & setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ ) VULKAN_HPP_NOEXCEPT - { - maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT - { - maxObjectEntryCounts = maxObjectEntryCounts_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT - { - minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT - { - minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT + DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT { - minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_; + deviceEvent = deviceEvent_; return *this; } - operator VkDeviceGeneratedCommandsLimitsNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceGeneratedCommandsLimitsNVX &() VULKAN_HPP_NOEXCEPT + operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGeneratedCommandsLimitsNVX const& ) const = default; + auto operator<=>( DeviceEventInfoEXT const& ) const = default; #else - bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount ) - && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts ) - && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment ) - && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment ) - && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment ); + && ( deviceEvent == rhs.deviceEvent ); } - bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT; const void* pNext = {}; - uint32_t maxIndirectCommandsLayoutTokenCount = {}; - uint32_t maxObjectEntryCounts = {}; - 
uint32_t minSequenceCountBufferOffsetAlignment = {}; - uint32_t minSequenceIndexBufferOffsetAlignment = {}; - uint32_t minCommandsTokenBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug; }; - static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct DeviceGroupBindSparseInfo { @@ -36642,6 +39036,363 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + struct IndirectCommandsStreamNV + { + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const& rhs ) VULKAN_HPP_NOEXCEPT + : buffer( rhs.buffer ) + , offset( rhs.offset ) + {} + + IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( IndirectCommandsStreamNV ) ); + return *this; + } + + IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + IndirectCommandsStreamNV& operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( IndirectCommandsStreamNV const& ) const = default; +#else + bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( buffer == rhs.buffer ) + && ( offset == rhs.offset ); + } + + bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + }; + static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct GeneratedCommandsInfoNV + { + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t streamCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( streamCount_ ) + , pStreams( pStreams_ ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + , preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , pipelineBindPoint( rhs.pipelineBindPoint ) + , pipeline( rhs.pipeline ) + , indirectCommandsLayout( rhs.indirectCommandsLayout ) + , streamCount( rhs.streamCount ) + , pStreams( rhs.pStreams ) + , sequencesCount( rhs.sequencesCount ) + , preprocessBuffer( rhs.preprocessBuffer ) + , preprocessOffset( rhs.preprocessOffset ) + , preprocessSize( rhs.preprocessSize ) + , sequencesCountBuffer( rhs.sequencesCountBuffer ) + , sequencesCountOffset( rhs.sequencesCountOffset ) + , sequencesIndexBuffer( rhs.sequencesIndexBuffer ) + , sequencesIndexOffset( rhs.sequencesIndexOffset ) + {} + + GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( GeneratedCommandsInfoNV ) - offsetof( GeneratedCommandsInfoNV, pNext ) ); + return *this; + } + + GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + GeneratedCommandsInfoNV& operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + GeneratedCommandsInfoNV & 
setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT + { + pStreams = pStreams_; + return *this; + } + + GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCount = sequencesCount_; + return *this; + } + + GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT + { + preprocessBuffer = preprocessBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT + { + preprocessOffset = preprocessOffset_; + return *this; + } + + GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT + { + preprocessSize = preprocessSize_; + return *this; + } + + GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountBuffer = sequencesCountBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountOffset = sequencesCountOffset_; + return *this; + } + + GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexBuffer = sequencesIndexBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexOffset = sequencesIndexOffset_; + return *this; + } + + operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GeneratedCommandsInfoNV const& ) const = default; +#else + bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( pipeline == rhs.pipeline ) + && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) + && ( streamCount == rhs.streamCount ) + && ( pStreams == rhs.pStreams ) + && ( sequencesCount == rhs.sequencesCount ) + && ( preprocessBuffer == rhs.preprocessBuffer ) + && ( preprocessOffset == rhs.preprocessOffset ) + && ( preprocessSize == rhs.preprocessSize ) + && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) + && ( sequencesCountOffset == rhs.sequencesCountOffset ) + && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) + && ( sequencesIndexOffset == rhs.sequencesIndexOffset ); + } + + bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t streamCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {}; + uint32_t sequencesCount = {}; + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = 
{}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; + }; + static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct GeneratedCommandsMemoryRequirementsInfoNV + { + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , maxSequencesCount( maxSequencesCount_ ) + {} + + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , pipelineBindPoint( rhs.pipelineBindPoint ) + , pipeline( rhs.pipeline ) + , indirectCommandsLayout( rhs.indirectCommandsLayout ) + , maxSequencesCount( rhs.maxSequencesCount ) + {} + + GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) - offsetof( GeneratedCommandsMemoryRequirementsInfoNV, pNext ) ); + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + GeneratedCommandsMemoryRequirementsInfoNV& operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const& ) const = default; +#else + bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( pipeline == rhs.pipeline ) + && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) + && ( maxSequencesCount == rhs.maxSequencesCount ); + } + + bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t maxSequencesCount = {}; + }; + static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) == 
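
An illustrative sketch, not from the patch, of filling the new GeneratedCommandsInfoNV for VK_NV_device_generated_commands using only the setters defined above. The pipeline, indirect commands layout and buffers are assumed to have been created elsewhere with the appropriate usage flags and sizes, and the caller owns the IndirectCommandsStreamNV entry so that pStreams stays valid while the structure is in use.

    #include <vulkan/vulkan.hpp>

    // Sketch only: all handles and sizes come from earlier setup code.
    vk::GeneratedCommandsInfoNV makeGeneratedCommandsInfo( vk::Pipeline                          pipeline,
                                                           vk::IndirectCommandsLayoutNV          layout,
                                                           const vk::IndirectCommandsStreamNV &  stream,
                                                           vk::Buffer                            preprocessBuffer,
                                                           vk::DeviceSize                        preprocessSize,
                                                           uint32_t                              sequencesCount )
    {
      return vk::GeneratedCommandsInfoNV()
        .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
        .setPipeline( pipeline )
        .setIndirectCommandsLayout( layout )
        .setStreamCount( 1 )
        .setPStreams( &stream )
        .setSequencesCount( sequencesCount )
        .setPreprocessBuffer( preprocessBuffer )
        .setPreprocessOffset( 0 )
        .setPreprocessSize( preprocessSize );
    }

The resulting structure would then be consumed by the preprocess and execute commands of the same extension (vkCmdPreprocessGeneratedCommandsNV / vkCmdExecuteGeneratedCommandsNV).
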
sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + struct VertexInputBindingDescription { VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( uint32_t binding_ = {}, @@ -38603,6 +41354,220 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + struct GraphicsShaderGroupCreateInfoNV + { + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {} ) VULKAN_HPP_NOEXCEPT + : stageCount( stageCount_ ) + , pStages( pStages_ ) + , pVertexInputState( pVertexInputState_ ) + , pTessellationState( pTessellationState_ ) + {} + + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , stageCount( rhs.stageCount ) + , pStages( rhs.pStages ) + , pVertexInputState( rhs.pVertexInputState ) + , pTessellationState( rhs.pTessellationState ) + {} + + GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( GraphicsShaderGroupCreateInfoNV ) - offsetof( GraphicsShaderGroupCreateInfoNV, pNext ) ); + return *this; + } + + GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + GraphicsShaderGroupCreateInfoNV& operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + { + pVertexInputState = pVertexInputState_; + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT + { + pTessellationState = pTessellationState_; + return *this; + } + + operator VkGraphicsShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GraphicsShaderGroupCreateInfoNV const& ) const = default; +#else + bool operator==( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( stageCount == rhs.stageCount ) + && ( pStages == 
rhs.pStages ) + && ( pVertexInputState == rhs.pVertexInputState ) + && ( pTessellationState == rhs.pTessellationState ); + } + + bool operator!=( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; + const void* pNext = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {}; + }; + static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct GraphicsPipelineShaderGroupsCreateInfoNV + { + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ = {}, + uint32_t pipelineCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ = {} ) VULKAN_HPP_NOEXCEPT + : groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , pipelineCount( pipelineCount_ ) + , pPipelines( pPipelines_ ) + {} + + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , groupCount( rhs.groupCount ) + , pGroups( rhs.pGroups ) + , pipelineCount( rhs.pipelineCount ) + , pPipelines( rhs.pPipelines ) + {} + + GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) - offsetof( GraphicsPipelineShaderGroupsCreateInfoNV, pNext ) ); + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + GraphicsPipelineShaderGroupsCreateInfoNV& operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = pipelineCount_; + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ ) VULKAN_HPP_NOEXCEPT + { + pPipelines = pPipelines_; + return *this; + } + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
GraphicsPipelineShaderGroupsCreateInfoNV const& ) const = default; +#else + bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( groupCount == rhs.groupCount ) + && ( pGroups == rhs.pGroups ) + && ( pipelineCount == rhs.pipelineCount ) + && ( pPipelines == rhs.pPipelines ); + } + + bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + const void* pNext = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups = {}; + uint32_t pipelineCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines = {}; + }; + static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + struct XYColorEXT { VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, @@ -41951,208 +44916,340 @@ namespace VULKAN_HPP_NAMESPACE static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct IndirectCommandsLayoutTokenNVX - { - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline, - uint32_t bindingUnit_ = {}, - uint32_t dynamicCount_ = {}, - uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT + struct IndirectCommandsLayoutTokenNV + { + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, + uint32_t stream_ = {}, + uint32_t offset_ = {}, + uint32_t vertexBindingUnit_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, + uint32_t pushconstantOffset_ = {}, + uint32_t pushconstantSize_ = {}, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, + uint32_t indexTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ = {}, + const uint32_t* pIndexTypeValues_ = {} ) VULKAN_HPP_NOEXCEPT : tokenType( tokenType_ ) - , bindingUnit( bindingUnit_ ) - , dynamicCount( dynamicCount_ ) - , divisor( divisor_ ) - {} - - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNVX( IndirectCommandsLayoutTokenNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : tokenType( rhs.tokenType ) - , bindingUnit( rhs.bindingUnit ) - , dynamicCount( rhs.dynamicCount ) - , divisor( rhs.divisor ) + , stream( stream_ ) + , offset( offset_ ) + , vertexBindingUnit( vertexBindingUnit_ ) + , vertexDynamicStride( vertexDynamicStride_ ) + , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) + , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) + , pushconstantOffset( pushconstantOffset_ ) + , pushconstantSize( pushconstantSize_ ) + , indirectStateFlags( indirectStateFlags_ ) + , indexTypeCount( indexTypeCount_ ) + , pIndexTypes( pIndexTypes_ ) + , pIndexTypeValues( pIndexTypeValues_ ) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const& rhs ) 
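
A usage sketch for the two shader-group structures above, not part of the patch: GraphicsPipelineShaderGroupsCreateInfoNV is chained into GraphicsPipelineCreateInfo::pNext so that one graphics pipeline exposes several shader groups for VK_NV_device_generated_commands. The stage array and vertex-input state are assumed to be prepared by the surrounding code, and the caller keeps the group and chain structures alive until pipeline creation.

    #include <vulkan/vulkan.hpp>

    // Sketch only: `stages` points to `stageCount` shader-stage entries set up elsewhere.
    void addShaderGroups( vk::GraphicsPipelineCreateInfo &                      pipelineCreateInfo,
                          const vk::PipelineShaderStageCreateInfo *             stages,
                          uint32_t                                              stageCount,
                          const vk::PipelineVertexInputStateCreateInfo *        vertexInput,
                          vk::GraphicsShaderGroupCreateInfoNV &                 group,
                          vk::GraphicsPipelineShaderGroupsCreateInfoNV &        shaderGroups )
    {
      group = vk::GraphicsShaderGroupCreateInfoNV( stageCount, stages, vertexInput, nullptr );

      shaderGroups = vk::GraphicsPipelineShaderGroupsCreateInfoNV()
                       .setGroupCount( 1 )
                       .setPGroups( &group )
                       .setPipelineCount( 0 )     // no referenced pipelines in this minimal case
                       .setPPipelines( nullptr );

      pipelineCreateInfo.setPNext( &shaderGroups );  // chained like other pipeline-creation extension structs
    }
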
VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , tokenType( rhs.tokenType ) + , stream( rhs.stream ) + , offset( rhs.offset ) + , vertexBindingUnit( rhs.vertexBindingUnit ) + , vertexDynamicStride( rhs.vertexDynamicStride ) + , pushconstantPipelineLayout( rhs.pushconstantPipelineLayout ) + , pushconstantShaderStageFlags( rhs.pushconstantShaderStageFlags ) + , pushconstantOffset( rhs.pushconstantOffset ) + , pushconstantSize( rhs.pushconstantSize ) + , indirectStateFlags( rhs.indirectStateFlags ) + , indexTypeCount( rhs.indexTypeCount ) + , pIndexTypes( rhs.pIndexTypes ) + , pIndexTypeValues( rhs.pIndexTypeValues ) {} - IndirectCommandsLayoutTokenNVX & operator=( IndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) ); + memcpy( &pNext, &rhs.pNext, sizeof( IndirectCommandsLayoutTokenNV ) - offsetof( IndirectCommandsLayoutTokenNV, pNext ) ); return *this; } - IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV& operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast(&rhs); return *this; } - IndirectCommandsLayoutTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT { tokenType = tokenType_; return *this; } - IndirectCommandsLayoutTokenNVX & setBindingUnit( uint32_t bindingUnit_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT { - bindingUnit = bindingUnit_; + stream = stream_; return *this; } - IndirectCommandsLayoutTokenNVX & setDynamicCount( uint32_t dynamicCount_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT { - dynamicCount = dynamicCount_; + offset = offset_; return *this; } - IndirectCommandsLayoutTokenNVX & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT { - divisor = divisor_; + vertexBindingUnit = vertexBindingUnit_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexDynamicStride = vertexDynamicStride_; return *this; } - operator VkIndirectCommandsLayoutTokenNVX const&() const VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pushconstantPipelineLayout = pushconstantPipelineLayout_; + return *this; } - operator VkIndirectCommandsLayoutTokenNVX &() VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pushconstantShaderStageFlags = pushconstantShaderStageFlags_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantOffset = pushconstantOffset_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantSize = pushconstantSize_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT + { + indirectStateFlags = indirectStateFlags_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = indexTypeCount_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypes = pIndexTypes_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t* pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypeValues = pIndexTypeValues_; + return *this; + } + + operator VkIndirectCommandsLayoutTokenNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsLayoutTokenNVX const& ) const = default; + auto operator<=>( IndirectCommandsLayoutTokenNV const& ) const = default; #else - bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT { - return ( tokenType == rhs.tokenType ) - && ( bindingUnit == rhs.bindingUnit ) - && ( dynamicCount == rhs.dynamicCount ) - && ( divisor == rhs.divisor ); + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( tokenType == rhs.tokenType ) + && ( stream == rhs.stream ) + && ( offset == rhs.offset ) + && ( vertexBindingUnit == rhs.vertexBindingUnit ) + && ( vertexDynamicStride == rhs.vertexDynamicStride ) + && ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) + && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) + && ( pushconstantOffset == rhs.pushconstantOffset ) + && ( pushconstantSize == rhs.pushconstantSize ) + && ( indirectStateFlags == rhs.indirectStateFlags ) + && ( indexTypeCount == rhs.indexTypeCount ) + && ( pIndexTypes == rhs.pIndexTypes ) + && ( pIndexTypeValues == rhs.pIndexTypeValues ); } - bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline; - uint32_t bindingUnit = {}; - uint32_t dynamicCount = {}; - uint32_t divisor = {}; - }; - static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct IndirectCommandsLayoutCreateInfoNVX - { - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNVX( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ = {}, - uint32_t tokenCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ = {} ) VULKAN_HPP_NOEXCEPT - : pipelineBindPoint( pipelineBindPoint_ ) - , flags( flags_ ) + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; + uint32_t stream = {}; + uint32_t offset = {}; + uint32_t vertexBindingUnit = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; + uint32_t pushconstantOffset = {}; + uint32_t pushconstantSize = {}; + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; + uint32_t indexTypeCount = {}; + const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes = {}; + const uint32_t* pIndexTypeValues = {}; + }; + static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct IndirectCommandsLayoutCreateInfoNV + { + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ = {}, + uint32_t streamCount_ = {}, + const uint32_t* pStreamStrides_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) , tokenCount( tokenCount_ ) , pTokens( pTokens_ ) + , streamCount( streamCount_ ) + , pStreamStrides( pStreamStrides_ ) {} - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNVX( IndirectCommandsLayoutCreateInfoNVX const& rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT : pNext( rhs.pNext ) - , pipelineBindPoint( rhs.pipelineBindPoint ) , flags( rhs.flags ) + , pipelineBindPoint( rhs.pipelineBindPoint ) , tokenCount( rhs.tokenCount ) , pTokens( rhs.pTokens ) + , streamCount( rhs.streamCount ) + , pStreamStrides( rhs.pStreamStrides ) {} - IndirectCommandsLayoutCreateInfoNVX & operator=( IndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( IndirectCommandsLayoutCreateInfoNVX ) - offsetof( IndirectCommandsLayoutCreateInfoNVX, pNext ) ); + memcpy( &pNext, &rhs.pNext, sizeof( IndirectCommandsLayoutCreateInfoNV ) - offsetof( IndirectCommandsLayoutCreateInfoNV, pNext ) ); return *this; } - IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - 
IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV& operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast(&rhs); return *this; } - IndirectCommandsLayoutCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - IndirectCommandsLayoutCreateInfoNVX & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { - pipelineBindPoint = pipelineBindPoint_; + flags = flags_; return *this; } - IndirectCommandsLayoutCreateInfoNVX & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + pipelineBindPoint = pipelineBindPoint_; return *this; } - IndirectCommandsLayoutCreateInfoNVX & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT { tokenCount = tokenCount_; return *this; } - IndirectCommandsLayoutCreateInfoNVX & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ ) VULKAN_HPP_NOEXCEPT { pTokens = pTokens_; return *this; } - operator VkIndirectCommandsLayoutCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t* pStreamStrides_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pStreamStrides = pStreamStrides_; + return *this; } - operator VkIndirectCommandsLayoutCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsLayoutCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsLayoutCreateInfoNVX const& ) const = default; + auto operator<=>( IndirectCommandsLayoutCreateInfoNV const& ) const = default; #else - bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( flags == rhs.flags ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( tokenCount == rhs.tokenCount ) - && ( pTokens == rhs.pTokens ); + && ( pTokens == rhs.pTokens ) + && ( streamCount == rhs.streamCount ) + && ( pStreamStrides == rhs.pStreamStrides ); } - bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsLayoutCreateInfoNV const& rhs ) 
const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; const void* pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {}; VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags = {}; uint32_t tokenCount = {}; - const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens = {}; + uint32_t streamCount = {}; + const uint32_t* pStreamStrides = {}; }; - static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct InitializePerformanceApiInfoINTEL { @@ -43698,1138 +46795,403 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT : size( size_ ) - , alignment( alignment_ ) - , memoryTypeBits( memoryTypeBits_ ) - {} - - VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const& rhs ) VULKAN_HPP_NOEXCEPT - : size( rhs.size ) - , alignment( rhs.alignment ) - , memoryTypeBits( rhs.memoryTypeBits ) - {} - - MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( static_cast(this), &rhs, sizeof( MemoryRequirements ) ); - return *this; - } - - MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryRequirements& operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryRequirements const& ) const = default; -#else - bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( size == rhs.size ) - && ( alignment == rhs.alignment ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::DeviceSize alignment = {}; - uint32_t memoryTypeBits = {}; - }; - static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
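// --------------------------------------------------------------------------
// Editorial sketch (not part of the generated patch): one way the renamed
// VK_NV_device_generated_commands wrappers above (IndirectCommandsLayoutTokenNV,
// IndirectCommandsLayoutCreateInfoNV) might be filled in from client code.
// The two-token layout, the stream stride, and the "device"/"dld" parameters
// are assumptions made up for this illustration; the extension's entry points
// are assumed to have been loaded into the dynamic dispatcher.
// --------------------------------------------------------------------------
#include <array>
#include <vulkan/vulkan.hpp>

vk::IndirectCommandsLayoutNV makeExampleLayout( vk::Device device, vk::DispatchLoaderDynamic const & dld )
{
  // Two tokens in a single input stream: pick a shader group, then issue a draw.
  std::array<vk::IndirectCommandsLayoutTokenNV, 2> tokens;
  tokens[0] = vk::IndirectCommandsLayoutTokenNV{}
                .setTokenType( vk::IndirectCommandsTokenTypeNV::eShaderGroup )
                .setStream( 0 )
                .setOffset( 0 );
  tokens[1] = vk::IndirectCommandsLayoutTokenNV{}
                .setTokenType( vk::IndirectCommandsTokenTypeNV::eDraw )
                .setStream( 0 )
                .setOffset( static_cast<uint32_t>( sizeof( VkBindShaderGroupIndirectCommandNV ) ) );

  // Stride of one record in the single stream (shader-group index followed by draw parameters).
  uint32_t const streamStride =
    static_cast<uint32_t>( sizeof( VkBindShaderGroupIndirectCommandNV ) + sizeof( VkDrawIndirectCommand ) );

  vk::IndirectCommandsLayoutCreateInfoNV createInfo = vk::IndirectCommandsLayoutCreateInfoNV{}
    .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
    .setTokenCount( static_cast<uint32_t>( tokens.size() ) )
    .setPTokens( tokens.data() )
    .setStreamCount( 1 )
    .setPStreamStrides( &streamStride );

  return device.createIndirectCommandsLayoutNV( createInfo, nullptr, dld );
}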
); - - struct MemoryRequirements2 - { - VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryRequirements( memoryRequirements_ ) - {} - - VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const& rhs ) VULKAN_HPP_NOEXCEPT - : pNext( rhs.pNext ) - , memoryRequirements( rhs.memoryRequirements ) - {} - - MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryRequirements2 ) - offsetof( MemoryRequirements2, pNext ) ); - return *this; - } - - MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryRequirements2 const& ) const = default; -#else - bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryRequirements == rhs.memoryRequirements ); - } - - bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {}; - }; - static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct MemoryType - { - VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, - uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : propertyFlags( propertyFlags_ ) - , heapIndex( heapIndex_ ) - {} - - VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const& rhs ) VULKAN_HPP_NOEXCEPT - : propertyFlags( rhs.propertyFlags ) - , heapIndex( rhs.heapIndex ) - {} - - MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( static_cast(this), &rhs, sizeof( MemoryType ) ); - return *this; - } - - MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryType& operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryType &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryType const& ) const = default; -#else - bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( propertyFlags == rhs.propertyFlags ) - && ( heapIndex == rhs.heapIndex ); - } - - bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {}; - uint32_t heapIndex = {}; - }; - static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct MemoryWin32HandlePropertiesKHR - { - VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryTypeBits( memoryTypeBits_ ) - {} - - VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT - : pNext( rhs.pNext ) - , memoryTypeBits( rhs.memoryTypeBits ) - {} - - MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryWin32HandlePropertiesKHR ) - offsetof( MemoryWin32HandlePropertiesKHR, pNext ) ); - return *this; - } - - MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryWin32HandlePropertiesKHR const& ) const = default; -#else - bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; - void* pNext = {}; - uint32_t memoryTypeBits = {}; - }; - static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_METAL_EXT - struct MetalSurfaceCreateInfoEXT - { - VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, - const CAMetalLayer* pLayer_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pLayer( pLayer_ ) - {} - - VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT - : pNext( rhs.pNext ) - , flags( rhs.flags ) - , pLayer( rhs.pLayer ) - {} - - MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( MetalSurfaceCreateInfoEXT ) - offsetof( MetalSurfaceCreateInfoEXT, pNext ) ); - return *this; - } - - MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) VULKAN_HPP_NOEXCEPT - { - pLayer = pLayer_; - return *this; - } - - operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MetalSurfaceCreateInfoEXT const& ) const = default; -#else - bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pLayer == rhs.pLayer ); - } - - bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; - const CAMetalLayer* pLayer = {}; - }; - static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - struct MultisamplePropertiesEXT - { - VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxSampleLocationGridSize( maxSampleLocationGridSize_ ) - {} - - VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT - : pNext( rhs.pNext ) - , maxSampleLocationGridSize( rhs.maxSampleLocationGridSize ) - {} - - MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( MultisamplePropertiesEXT ) - offsetof( MultisamplePropertiesEXT, pNext ) ); - return *this; - } - - MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MultisamplePropertiesEXT const& ) const = default; -#else - bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); - } - - bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; - }; - static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ObjectTableCreateInfoNVX - { - VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( uint32_t objectCount_ = {}, - const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ = {}, - const uint32_t* pObjectEntryCounts_ = {}, - const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = {}, - uint32_t maxUniformBuffersPerDescriptor_ = {}, - uint32_t maxStorageBuffersPerDescriptor_ = {}, - uint32_t maxStorageImagesPerDescriptor_ = {}, - uint32_t maxSampledImagesPerDescriptor_ = {}, - uint32_t maxPipelineLayouts_ = {} ) VULKAN_HPP_NOEXCEPT - : objectCount( objectCount_ ) - , pObjectEntryTypes( pObjectEntryTypes_ ) - , pObjectEntryCounts( pObjectEntryCounts_ ) - , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ ) - , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ ) - , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ ) - , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ ) - , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ ) - , maxPipelineLayouts( maxPipelineLayouts_ ) - {} - - VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( ObjectTableCreateInfoNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : pNext( rhs.pNext ) - , objectCount( rhs.objectCount ) - , pObjectEntryTypes( rhs.pObjectEntryTypes ) - , pObjectEntryCounts( rhs.pObjectEntryCounts ) - , pObjectEntryUsageFlags( rhs.pObjectEntryUsageFlags ) - , maxUniformBuffersPerDescriptor( rhs.maxUniformBuffersPerDescriptor ) - , maxStorageBuffersPerDescriptor( rhs.maxStorageBuffersPerDescriptor ) - , maxStorageImagesPerDescriptor( rhs.maxStorageImagesPerDescriptor ) - , maxSampledImagesPerDescriptor( rhs.maxSampledImagesPerDescriptor ) - , maxPipelineLayouts( rhs.maxPipelineLayouts ) - {} - - ObjectTableCreateInfoNVX & operator=( ObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ObjectTableCreateInfoNVX ) - offsetof( ObjectTableCreateInfoNVX, pNext ) ); - return *this; - } - - ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTableCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ObjectTableCreateInfoNVX & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT - { - objectCount = objectCount_; - return *this; - } - - ObjectTableCreateInfoNVX & setPObjectEntryTypes( const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ ) VULKAN_HPP_NOEXCEPT - { - pObjectEntryTypes = pObjectEntryTypes_; - return *this; - } - - ObjectTableCreateInfoNVX & setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT - { - pObjectEntryCounts = pObjectEntryCounts_; - return *this; - } - - ObjectTableCreateInfoNVX & setPObjectEntryUsageFlags( const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) VULKAN_HPP_NOEXCEPT - { - pObjectEntryUsageFlags = pObjectEntryUsageFlags_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT - { - maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT - { - maxStorageBuffersPerDescriptor = 
maxStorageBuffersPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT - { - maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT - { - maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ ) VULKAN_HPP_NOEXCEPT - { - maxPipelineLayouts = maxPipelineLayouts_; - return *this; - } - - operator VkObjectTableCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkObjectTableCreateInfoNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ObjectTableCreateInfoNVX const& ) const = default; -#else - bool operator==( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectCount == rhs.objectCount ) - && ( pObjectEntryTypes == rhs.pObjectEntryTypes ) - && ( pObjectEntryCounts == rhs.pObjectEntryCounts ) - && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags ) - && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor ) - && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor ) - && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor ) - && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor ) - && ( maxPipelineLayouts == rhs.maxPipelineLayouts ); - } - - bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eObjectTableCreateInfoNVX; - const void* pNext = {}; - uint32_t objectCount = {}; - const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes = {}; - const uint32_t* pObjectEntryCounts = {}; - const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags = {}; - uint32_t maxUniformBuffersPerDescriptor = {}; - uint32_t maxStorageBuffersPerDescriptor = {}; - uint32_t maxStorageImagesPerDescriptor = {}; - uint32_t maxSampledImagesPerDescriptor = {}; - uint32_t maxPipelineLayouts = {}; - }; - static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ObjectTableEntryNVX - { - VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) + , alignment( alignment_ ) + , memoryTypeBits( memoryTypeBits_ ) {} - VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( ObjectTableEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : type( rhs.type ) - , flags( rhs.flags ) + VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const& rhs ) VULKAN_HPP_NOEXCEPT + : size( rhs.size ) + , alignment( rhs.alignment ) + , memoryTypeBits( rhs.memoryTypeBits ) {} - ObjectTableEntryNVX & operator=( ObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( ObjectTableEntryNVX ) ); + memcpy( static_cast(this), &rhs, sizeof( MemoryRequirements ) ); return *this; } - ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTableEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - ObjectTableEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT + MemoryRequirements& operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + *this = *reinterpret_cast(&rhs); return *this; } - operator VkObjectTableEntryNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkObjectTableEntryNVX &() VULKAN_HPP_NOEXCEPT + operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ObjectTableEntryNVX const& ) const = default; + auto operator<=>( MemoryRequirements const& ) const = default; #else - bool operator==( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT { - return ( type == rhs.type ) - && ( flags == rhs.flags ); + return ( size == rhs.size ) + && ( alignment == rhs.alignment ) + && ( memoryTypeBits == rhs.memoryTypeBits ); } - bool operator!=( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize alignment = {}; + uint32_t memoryTypeBits = {}; }; - static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct ObjectTableDescriptorSetEntryNVX + struct MemoryRequirements2 { - VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , pipelineLayout( pipelineLayout_ ) - , descriptorSet( descriptorSet_ ) - {} - - VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( ObjectTableDescriptorSetEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : type( rhs.type ) - , flags( rhs.flags ) - , pipelineLayout( rhs.pipelineLayout ) - , descriptorSet( rhs.descriptorSet ) + VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryRequirements( memoryRequirements_ ) {} - explicit ObjectTableDescriptorSetEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , pipelineLayout( pipelineLayout_ ) - , descriptorSet( descriptorSet_ ) + VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , memoryRequirements( rhs.memoryRequirements ) {} - ObjectTableDescriptorSetEntryNVX & operator=( ObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) ); + memcpy( &pNext, &rhs.pNext, sizeof( MemoryRequirements2 ) - offsetof( MemoryRequirements2, pNext ) ); return *this; } - ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT - { - pipelineLayout = pipelineLayout_; - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ ) VULKAN_HPP_NOEXCEPT + MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT { - descriptorSet = descriptorSet_; + *this = *reinterpret_cast(&rhs); return *this; } - operator VkObjectTableDescriptorSetEntryNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkObjectTableDescriptorSetEntryNVX &() VULKAN_HPP_NOEXCEPT + operator 
VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ObjectTableDescriptorSetEntryNVX const& ) const = default; + auto operator<=>( MemoryRequirements2 const& ) const = default; #else - bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT { - return ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( pipelineLayout == rhs.pipelineLayout ) - && ( descriptorSet == rhs.descriptorSet ); + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryRequirements == rhs.memoryRequirements ); } - bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {}; }; - static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
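// --------------------------------------------------------------------------
// Editorial sketch (not part of the generated patch): querying the
// MemoryRequirements2 wrapper above through the Vulkan 1.1 entry point.
// "device" and "buffer" are assumed to exist; further query structures (for
// example a dedicated-allocation query) could be chained via pNext in the
// same call.
// --------------------------------------------------------------------------
#include <vulkan/vulkan.hpp>

uint32_t queryBufferMemoryTypeBits( vk::Device device, vk::Buffer buffer )
{
  vk::BufferMemoryRequirementsInfo2 info( buffer );
  vk::MemoryRequirements2 requirements = device.getBufferMemoryRequirements2( info );
  return requirements.memoryRequirements.memoryTypeBits;
}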
); - struct ObjectTableIndexBufferEntryNVX + struct MemoryType { - VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , buffer( buffer_ ) - , indexType( indexType_ ) - {} - - VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( ObjectTableIndexBufferEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : type( rhs.type ) - , flags( rhs.flags ) - , buffer( rhs.buffer ) - , indexType( rhs.indexType ) + VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, + uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : propertyFlags( propertyFlags_ ) + , heapIndex( heapIndex_ ) {} - explicit ObjectTableIndexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , buffer( buffer_ ) - , indexType( indexType_ ) + VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const& rhs ) VULKAN_HPP_NOEXCEPT + : propertyFlags( rhs.propertyFlags ) + , heapIndex( rhs.heapIndex ) {} - ObjectTableIndexBufferEntryNVX & operator=( ObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) ); + memcpy( static_cast(this), &rhs, sizeof( MemoryType ) ); return *this; } - ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTableIndexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - ObjectTableIndexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ObjectTableIndexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } - - ObjectTableIndexBufferEntryNVX & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + MemoryType& operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT { - indexType = indexType_; + *this = *reinterpret_cast(&rhs); return *this; } - operator VkObjectTableIndexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkObjectTableIndexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT + operator VkMemoryType &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ObjectTableIndexBufferEntryNVX const& ) const = default; + auto operator<=>( MemoryType const& ) const = default; #else - bool operator==( ObjectTableIndexBufferEntryNVX const& 
rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT { - return ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( buffer == rhs.buffer ) - && ( indexType == rhs.indexType ); + return ( propertyFlags == rhs.propertyFlags ) + && ( heapIndex == rhs.heapIndex ); } - bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {}; + uint32_t heapIndex = {}; }; - static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct ObjectTablePipelineEntryNVX +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct MemoryWin32HandlePropertiesKHR { - VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , pipeline( pipeline_ ) - {} - - VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( ObjectTablePipelineEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : type( rhs.type ) - , flags( rhs.flags ) - , pipeline( rhs.pipeline ) + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) {} - explicit ObjectTablePipelineEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , pipeline( pipeline_ ) + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , memoryTypeBits( rhs.memoryTypeBits ) {} - ObjectTablePipelineEntryNVX & operator=( ObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( ObjectTablePipelineEntryNVX ) ); + memcpy( &pNext, &rhs.pNext, sizeof( MemoryWin32HandlePropertiesKHR ) - offsetof( MemoryWin32HandlePropertiesKHR, pNext ) ); return *this; } - ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTablePipelineEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return 
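// --------------------------------------------------------------------------
// Editorial sketch (not part of the generated patch): the usual pattern for
// choosing a memory type index with the MemoryType wrapper above.
// "physicalDevice" is an assumed parameter; ~0u is returned when no type
// matches both the resource's allowed types and the requested properties.
// --------------------------------------------------------------------------
#include <vulkan/vulkan.hpp>

uint32_t findMemoryType( vk::PhysicalDevice physicalDevice, uint32_t memoryTypeBits, vk::MemoryPropertyFlags required )
{
  vk::PhysicalDeviceMemoryProperties const props = physicalDevice.getMemoryProperties();
  for ( uint32_t i = 0; i < props.memoryTypeCount; ++i )
  {
    vk::MemoryType const & type = props.memoryTypes[i];
    if ( ( memoryTypeBits & ( 1u << i ) ) && ( ( type.propertyFlags & required ) == required ) )
    {
      return i;   // first type allowed by the resource that has all required properties
    }
  }
  return ~0u;
}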
*this; - } - - ObjectTablePipelineEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ObjectTablePipelineEntryNVX & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - pipeline = pipeline_; + *this = *reinterpret_cast(&rhs); return *this; } - operator VkObjectTablePipelineEntryNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkObjectTablePipelineEntryNVX &() VULKAN_HPP_NOEXCEPT + operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ObjectTablePipelineEntryNVX const& ) const = default; + auto operator<=>( MemoryWin32HandlePropertiesKHR const& ) const = default; #else - bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { - return ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( pipeline == rhs.pipeline ); + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); } - bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; + void* pNext = {}; + uint32_t memoryTypeBits = {}; }; - static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ObjectTablePushConstantEntryNVX +#ifdef VK_USE_PLATFORM_METAL_EXT + struct MetalSurfaceCreateInfoEXT { - VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , pipelineLayout( pipelineLayout_ ) - , stageFlags( stageFlags_ ) + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, + const CAMetalLayer* pLayer_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pLayer( pLayer_ ) {} - VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( ObjectTablePushConstantEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : type( rhs.type ) + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) , flags( rhs.flags ) - , pipelineLayout( rhs.pipelineLayout ) - , stageFlags( rhs.stageFlags ) - {} - - explicit ObjectTablePushConstantEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , pipelineLayout( pipelineLayout_ ) - , stageFlags( stageFlags_ ) + , pLayer( rhs.pLayer ) {} - ObjectTablePushConstantEntryNVX & operator=( ObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( ObjectTablePushConstantEntryNVX ) ); + memcpy( &pNext, &rhs.pNext, sizeof( MetalSurfaceCreateInfoEXT ) - offsetof( MetalSurfaceCreateInfoEXT, pNext ) ); return *this; } - ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast(&rhs); return *this; } - ObjectTablePushConstantEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT + MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { - type = type_; + pNext = pNext_; return *this; } - ObjectTablePushConstantEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT + MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - ObjectTablePushConstantEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT - { - pipelineLayout = pipelineLayout_; - return *this; - } - - ObjectTablePushConstantEntryNVX & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) VULKAN_HPP_NOEXCEPT { - stageFlags = stageFlags_; + pLayer = pLayer_; return *this; } - operator 
VkObjectTablePushConstantEntryNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkObjectTablePushConstantEntryNVX &() VULKAN_HPP_NOEXCEPT + operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ObjectTablePushConstantEntryNVX const& ) const = default; + auto operator<=>( MetalSurfaceCreateInfoEXT const& ) const = default; #else - bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { - return ( type == rhs.type ) + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) - && ( pipelineLayout == rhs.pipelineLayout ) - && ( stageFlags == rhs.stageFlags ); + && ( pLayer == rhs.pLayer ); } - bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; + const CAMetalLayer* pLayer = {}; }; - static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct ObjectTableVertexBufferEntryNVX + struct MultisamplePropertiesEXT { - VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , buffer( buffer_ ) - {} - - VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( ObjectTableVertexBufferEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT - : type( rhs.type ) - , flags( rhs.flags ) - , buffer( rhs.buffer ) + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxSampleLocationGridSize( maxSampleLocationGridSize_ ) {} - explicit ObjectTableVertexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , buffer( buffer_ ) + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , maxSampleLocationGridSize( rhs.maxSampleLocationGridSize ) {} - ObjectTableVertexBufferEntryNVX & operator=( ObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) ); + memcpy( &pNext, &rhs.pNext, sizeof( MultisamplePropertiesEXT ) - offsetof( MultisamplePropertiesEXT, pNext ) ); return *this; } - ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTableVertexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - ObjectTableVertexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ObjectTableVertexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - buffer = buffer_; + *this = *reinterpret_cast(&rhs); return *this; } - operator VkObjectTableVertexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkObjectTableVertexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT + operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ObjectTableVertexBufferEntryNVX const& ) const = default; + auto operator<=>( MultisamplePropertiesEXT const& ) const = default; #else - bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { - return ( type == rhs.type ) - && ( 
flags == rhs.flags ) - && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); } - bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; }; - static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PastPresentationTimingGOOGLE { @@ -47661,6 +50023,256 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceGeneratedCommands( deviceGeneratedCommands_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , deviceGeneratedCommands( rhs.deviceGeneratedCommands ) + {} + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) - offsetof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, pNext ) ); + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV& operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + 
auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + }; + static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( uint32_t maxGraphicsShaderGroupCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsStreamCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsStreamStride_ = {}, + uint32_t minSequencesCountBufferOffsetAlignment_ = {}, + uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, + uint32_t minIndirectCommandsBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ) + , maxIndirectSequenceCount( maxIndirectSequenceCount_ ) + , maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ) + , maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ) + , maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ) + , maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ) + , minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ) + , minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ) + , minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , maxGraphicsShaderGroupCount( rhs.maxGraphicsShaderGroupCount ) + , maxIndirectSequenceCount( rhs.maxIndirectSequenceCount ) + , maxIndirectCommandsTokenCount( rhs.maxIndirectCommandsTokenCount ) + , maxIndirectCommandsStreamCount( rhs.maxIndirectCommandsStreamCount ) + , maxIndirectCommandsTokenOffset( rhs.maxIndirectCommandsTokenOffset ) + , maxIndirectCommandsStreamStride( rhs.maxIndirectCommandsStreamStride ) + , minSequencesCountBufferOffsetAlignment( rhs.minSequencesCountBufferOffsetAlignment ) + , minSequencesIndexBufferOffsetAlignment( rhs.minSequencesIndexBufferOffsetAlignment ) + , minIndirectCommandsBufferOffsetAlignment( rhs.minIndirectCommandsBufferOffsetAlignment ) + {} + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) - offsetof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV, pNext ) ); + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( 
VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV& operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) + && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) + && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) + && ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) + && ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) + && ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) + && ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) + && ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) + && ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + const void* pNext = {}; + uint32_t maxGraphicsShaderGroupCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsStreamCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsStreamStride = {}; + uint32_t minSequencesCountBufferOffsetAlignment = {}; + uint32_t minSequencesIndexBufferOffsetAlignment = {}; + uint32_t minIndirectCommandsBufferOffsetAlignment = {}; + }; + static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
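// --- usage sketch (editorial, not part of the patch) ---
// A minimal example of querying the new VK_NV_device_generated_commands feature
// and limit structs through the standard pNext chains; the `physicalDevice`
// handle is assumed to exist already.
vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV dgcFeatures;
vk::PhysicalDeviceFeatures2 features2;
features2.pNext = &dgcFeatures;                 // chain the NV feature struct
physicalDevice.getFeatures2( &features2 );

vk::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV dgcProperties;
vk::PhysicalDeviceProperties2 properties2;
properties2.pNext = &dgcProperties;             // chain the NV limit struct
physicalDevice.getProperties2( &properties2 );

if ( dgcFeatures.deviceGeneratedCommands )
{
  // dgcProperties.maxIndirectCommandsTokenCount etc. bound how an indirect
  // commands layout may be laid out before vkCmdExecuteGeneratedCommandsNV is used.
}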
); + + struct PhysicalDeviceDiagnosticsConfigFeaturesNV + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {} ) VULKAN_HPP_NOEXCEPT + : diagnosticsConfig( diagnosticsConfig_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , diagnosticsConfig( rhs.diagnosticsConfig ) + {} + + PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) - offsetof( PhysicalDeviceDiagnosticsConfigFeaturesNV, pNext ) ); + return *this; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV& operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT + { + diagnosticsConfig = diagnosticsConfig_; + return *this; + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( diagnosticsConfig == rhs.diagnosticsConfig ); + } + + bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; + }; + static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + struct PhysicalDeviceDiscardRectanglePropertiesEXT { VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT @@ -51447,6 +54059,80 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
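// --- usage sketch (editorial, not part of the patch) ---
// Checking for NVIDIA device diagnostics support uses the same features2
// chaining pattern; `physicalDevice` is an assumed handle.
vk::PhysicalDeviceDiagnosticsConfigFeaturesNV diagnosticsFeatures;
vk::PhysicalDeviceFeatures2 features2;
features2.pNext = &diagnosticsFeatures;
physicalDevice.getFeatures2( &features2 );
// diagnosticsFeatures.diagnosticsConfig == VK_TRUE means the extension's
// device create-info struct can be chained when the logical device is created.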
); + struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineCreationCacheControl( pipelineCreationCacheControl_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , pipelineCreationCacheControl( rhs.pipelineCreationCacheControl ) + {} + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) - offsetof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT, pNext ) ); + return *this; + } + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT& operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCreationCacheControl = pipelineCreationCacheControl_; + return *this; + } + + operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); + } + + bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; + }; + static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR { VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT @@ -52023,6 +54709,274 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" 
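// --- usage sketch (editorial, not part of the patch) ---
// Opting in to VK_EXT_pipeline_creation_cache_control at device creation by
// chaining the new feature struct; `physicalDevice`, `queueFamilyIndex` and
// `queuePriority` are assumed to exist.
vk::PhysicalDevicePipelineCreationCacheControlFeaturesEXT cacheControlFeatures;
cacheControlFeatures.pipelineCreationCacheControl = VK_TRUE;

vk::DeviceQueueCreateInfo queueCreateInfo( {}, queueFamilyIndex, 1, &queuePriority );
vk::DeviceCreateInfo deviceCreateInfo;
deviceCreateInfo.pNext                = &cacheControlFeatures;   // enables the feature
deviceCreateInfo.queueCreateInfoCount = 1;
deviceCreateInfo.pQueueCreateInfos    = &queueCreateInfo;
vk::Device device = physicalDevice.createDevice( deviceCreateInfo );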
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PhysicalDeviceRayTracingFeaturesKHR + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ = {} ) VULKAN_HPP_NOEXCEPT + : rayTracing( rayTracing_ ) + , rayTracingShaderGroupHandleCaptureReplay( rayTracingShaderGroupHandleCaptureReplay_ ) + , rayTracingShaderGroupHandleCaptureReplayMixed( rayTracingShaderGroupHandleCaptureReplayMixed_ ) + , rayTracingAccelerationStructureCaptureReplay( rayTracingAccelerationStructureCaptureReplay_ ) + , rayTracingIndirectTraceRays( rayTracingIndirectTraceRays_ ) + , rayTracingIndirectAccelerationStructureBuild( rayTracingIndirectAccelerationStructureBuild_ ) + , rayTracingHostAccelerationStructureCommands( rayTracingHostAccelerationStructureCommands_ ) + , rayQuery( rayQuery_ ) + , rayTracingPrimitiveCulling( rayTracingPrimitiveCulling_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , rayTracing( rhs.rayTracing ) + , rayTracingShaderGroupHandleCaptureReplay( rhs.rayTracingShaderGroupHandleCaptureReplay ) + , rayTracingShaderGroupHandleCaptureReplayMixed( rhs.rayTracingShaderGroupHandleCaptureReplayMixed ) + , rayTracingAccelerationStructureCaptureReplay( rhs.rayTracingAccelerationStructureCaptureReplay ) + , rayTracingIndirectTraceRays( rhs.rayTracingIndirectTraceRays ) + , rayTracingIndirectAccelerationStructureBuild( rhs.rayTracingIndirectAccelerationStructureBuild ) + , rayTracingHostAccelerationStructureCommands( rhs.rayTracingHostAccelerationStructureCommands ) + , rayQuery( rhs.rayQuery ) + , rayTracingPrimitiveCulling( rhs.rayTracingPrimitiveCulling ) + {} + + PhysicalDeviceRayTracingFeaturesKHR & operator=( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRayTracingFeaturesKHR ) - offsetof( PhysicalDeviceRayTracingFeaturesKHR, pNext ) ); + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceRayTracingFeaturesKHR& operator=( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracing( VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ ) VULKAN_HPP_NOEXCEPT + { + rayTracing = rayTracing_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingShaderGroupHandleCaptureReplay = 
rayTracingShaderGroupHandleCaptureReplay_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingShaderGroupHandleCaptureReplayMixed = rayTracingShaderGroupHandleCaptureReplayMixed_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingAccelerationStructureCaptureReplay = rayTracingAccelerationStructureCaptureReplay_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectTraceRays( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingIndirectTraceRays = rayTracingIndirectTraceRays_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectAccelerationStructureBuild( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingIndirectAccelerationStructureBuild = rayTracingIndirectAccelerationStructureBuild_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingHostAccelerationStructureCommands( VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingHostAccelerationStructureCommands = rayTracingHostAccelerationStructureCommands_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT + { + rayQuery = rayQuery_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPrimitiveCulling = rayTracingPrimitiveCulling_; + return *this; + } + + operator VkPhysicalDeviceRayTracingFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceRayTracingFeaturesKHR const& ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( rayTracing == rhs.rayTracing ) + && ( rayTracingShaderGroupHandleCaptureReplay == rhs.rayTracingShaderGroupHandleCaptureReplay ) + && ( rayTracingShaderGroupHandleCaptureReplayMixed == rhs.rayTracingShaderGroupHandleCaptureReplayMixed ) + && ( rayTracingAccelerationStructureCaptureReplay == rhs.rayTracingAccelerationStructureCaptureReplay ) + && ( rayTracingIndirectTraceRays == rhs.rayTracingIndirectTraceRays ) + && ( rayTracingIndirectAccelerationStructureBuild == rhs.rayTracingIndirectAccelerationStructureBuild ) + && ( rayTracingHostAccelerationStructureCommands == rhs.rayTracingHostAccelerationStructureCommands ) + && ( rayQuery == rhs.rayQuery ) + && ( rayTracingPrimitiveCulling == rhs.rayTracingPrimitiveCulling ); + } + + bool operator!=( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracing = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling = {}; + }; + static_assert( sizeof( PhysicalDeviceRayTracingFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingFeaturesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PhysicalDeviceRayTracingPropertiesKHR + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesKHR( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxPrimitiveCount_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {}, + uint32_t shaderGroupHandleCaptureReplaySize_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderGroupHandleSize( shaderGroupHandleSize_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , maxShaderGroupStride( maxShaderGroupStride_ ) + , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) + , maxGeometryCount( maxGeometryCount_ ) + , maxInstanceCount( maxInstanceCount_ ) + , maxPrimitiveCount( maxPrimitiveCount_ ) + , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) + , shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesKHR( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shaderGroupHandleSize( rhs.shaderGroupHandleSize ) + , maxRecursionDepth( rhs.maxRecursionDepth ) + , maxShaderGroupStride( rhs.maxShaderGroupStride ) + , shaderGroupBaseAlignment( rhs.shaderGroupBaseAlignment ) + , maxGeometryCount( rhs.maxGeometryCount ) + , maxInstanceCount( rhs.maxInstanceCount ) + , maxPrimitiveCount( rhs.maxPrimitiveCount ) + , maxDescriptorSetAccelerationStructures( rhs.maxDescriptorSetAccelerationStructures ) + , shaderGroupHandleCaptureReplaySize( rhs.shaderGroupHandleCaptureReplaySize ) + {} + + PhysicalDeviceRayTracingPropertiesKHR & operator=( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRayTracingPropertiesKHR ) - offsetof( PhysicalDeviceRayTracingPropertiesKHR, pNext ) ); + return *this; + } + + PhysicalDeviceRayTracingPropertiesKHR( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceRayTracingPropertiesKHR& operator=( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceRayTracingPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
PhysicalDeviceRayTracingPropertiesKHR const& ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) + && ( maxRecursionDepth == rhs.maxRecursionDepth ) + && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) + && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) + && ( maxGeometryCount == rhs.maxGeometryCount ) + && ( maxInstanceCount == rhs.maxInstanceCount ) + && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) + && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) + && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ); + } + + bool operator!=( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesKHR; + void* pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxPrimitiveCount = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + uint32_t shaderGroupHandleCaptureReplaySize = {}; + }; + static_assert( sizeof( PhysicalDeviceRayTracingPropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct PhysicalDeviceRayTracingPropertiesNV { VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {}, @@ -58397,6 +61351,93 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
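#ifdef VK_ENABLE_BETA_EXTENSIONS
// --- usage sketch (editorial, not part of the patch) ---
// Querying the provisional KHR ray-tracing feature and limit structs; the
// extension is beta, so this only compiles with VK_ENABLE_BETA_EXTENSIONS
// defined, and `physicalDevice` is an assumed handle.
vk::PhysicalDeviceRayTracingFeaturesKHR rayTracingFeatures;
vk::PhysicalDeviceFeatures2 features2;
features2.pNext = &rayTracingFeatures;
physicalDevice.getFeatures2( &features2 );

vk::PhysicalDeviceRayTracingPropertiesKHR rayTracingProperties;
vk::PhysicalDeviceProperties2 properties2;
properties2.pNext = &rayTracingProperties;
physicalDevice.getProperties2( &properties2 );

bool usable = rayTracingFeatures.rayTracing && ( rayTracingProperties.maxRecursionDepth >= 1 );
#endif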
); +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PipelineLibraryCreateInfoKHR + { + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ = {} ) VULKAN_HPP_NOEXCEPT + : libraryCount( libraryCount_ ) + , pLibraries( pLibraries_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , libraryCount( rhs.libraryCount ) + , pLibraries( rhs.pLibraries ) + {} + + PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PipelineLibraryCreateInfoKHR ) - offsetof( PipelineLibraryCreateInfoKHR, pNext ) ); + return *this; + } + + PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PipelineLibraryCreateInfoKHR& operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PipelineLibraryCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = libraryCount_; + return *this; + } + + PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ ) VULKAN_HPP_NOEXCEPT + { + pLibraries = pLibraries_; + return *this; + } + + operator VkPipelineLibraryCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineLibraryCreateInfoKHR const& ) const = default; +#else + bool operator==( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( libraryCount == rhs.libraryCount ) + && ( pLibraries == rhs.pLibraries ); + } + + bool operator!=( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; + const void* pNext = {}; + uint32_t libraryCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries = {}; + }; + static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct PipelineRasterizationConservativeStateCreateInfoEXT { VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, @@ -60807,175 +63848,175 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
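#ifdef VK_ENABLE_BETA_EXTENSIONS
// --- usage sketch (editorial, not part of the patch) ---
// Aggregating previously created pipeline libraries; `libraryA` and `libraryB`
// are assumed vk::Pipeline handles. The filled struct is typically passed as
// the `libraries` member of RayTracingPipelineCreateInfoKHR further below.
vk::Pipeline pipelineLibraries[] = { libraryA, libraryB };
vk::PipelineLibraryCreateInfoKHR libraryInfo;
libraryInfo.libraryCount = 2;
libraryInfo.pLibraries   = pipelineLibraries;
#endif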
); - struct QueryPoolCreateInfoINTEL + struct QueryPoolPerformanceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR QueryPoolCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT - : performanceCountersSampling( performanceCountersSampling_ ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, + uint32_t counterIndexCount_ = {}, + const uint32_t* pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFamilyIndex( queueFamilyIndex_ ) + , counterIndexCount( counterIndexCount_ ) + , pCounterIndices( pCounterIndices_ ) {} - VULKAN_HPP_CONSTEXPR QueryPoolCreateInfoINTEL( QueryPoolCreateInfoINTEL const& rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT : pNext( rhs.pNext ) - , performanceCountersSampling( rhs.performanceCountersSampling ) + , queueFamilyIndex( rhs.queueFamilyIndex ) + , counterIndexCount( rhs.counterIndexCount ) + , pCounterIndices( rhs.pCounterIndices ) {} - QueryPoolCreateInfoINTEL & operator=( QueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolCreateInfoINTEL ) - offsetof( QueryPoolCreateInfoINTEL, pNext ) ); + memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolPerformanceCreateInfoKHR ) - offsetof( QueryPoolPerformanceCreateInfoKHR, pNext ) ); return *this; } - QueryPoolCreateInfoINTEL( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - QueryPoolCreateInfoINTEL& operator=( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceCreateInfoKHR& operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast(&rhs); return *this; } - QueryPoolCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - QueryPoolCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT { - performanceCountersSampling = performanceCountersSampling_; + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = counterIndexCount_; + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT + { + pCounterIndices = pCounterIndices_; return *this; } - operator VkQueryPoolCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkQueryPoolCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT + operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueryPoolCreateInfoINTEL const& ) const = default; + auto operator<=>( QueryPoolPerformanceCreateInfoKHR const& ) const = default; #else - bool operator==( QueryPoolCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( performanceCountersSampling == rhs.performanceCountersSampling ); + && ( queueFamilyIndex == rhs.queueFamilyIndex ) + && ( counterIndexCount == rhs.counterIndexCount ) + && ( pCounterIndices == rhs.pCounterIndices ); } - bool operator!=( QueryPoolCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfoINTEL; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; + uint32_t queueFamilyIndex = {}; + uint32_t counterIndexCount = {}; + const uint32_t* pCounterIndices = {}; }; - static_assert( sizeof( QueryPoolCreateInfoINTEL ) == sizeof( VkQueryPoolCreateInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
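// --- usage sketch (editorial, not part of the patch) ---
// Creating a KHR performance query pool; `device` and `queueFamilyIndex` are
// assumed, and the counter indices would normally come from the counter
// enumeration call of VK_KHR_performance_query.
uint32_t counterIndices[] = { 0, 1 };
vk::QueryPoolPerformanceCreateInfoKHR performanceCreateInfo;
performanceCreateInfo.queueFamilyIndex  = queueFamilyIndex;
performanceCreateInfo.counterIndexCount = 2;
performanceCreateInfo.pCounterIndices   = counterIndices;

vk::QueryPoolCreateInfo queryPoolCreateInfo;
queryPoolCreateInfo.pNext      = &performanceCreateInfo;
queryPoolCreateInfo.queryType  = vk::QueryType::ePerformanceQueryKHR;
queryPoolCreateInfo.queryCount = 1;
vk::QueryPool queryPool = device.createQueryPool( queryPoolCreateInfo );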
); - struct QueryPoolPerformanceCreateInfoKHR + struct QueryPoolPerformanceQueryCreateInfoINTEL { - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, - uint32_t counterIndexCount_ = {}, - const uint32_t* pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT - : queueFamilyIndex( queueFamilyIndex_ ) - , counterIndexCount( counterIndexCount_ ) - , pCounterIndices( pCounterIndices_ ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT + : performanceCountersSampling( performanceCountersSampling_ ) {} - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) VULKAN_HPP_NOEXCEPT : pNext( rhs.pNext ) - , queueFamilyIndex( rhs.queueFamilyIndex ) - , counterIndexCount( rhs.counterIndexCount ) - , pCounterIndices( rhs.pCounterIndices ) + , performanceCountersSampling( rhs.performanceCountersSampling ) {} - QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolPerformanceCreateInfoKHR ) - offsetof( QueryPoolPerformanceCreateInfoKHR, pNext ) ); + memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) - offsetof( QueryPoolPerformanceQueryCreateInfoINTEL, pNext ) ); return *this; } - QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - QueryPoolPerformanceCreateInfoKHR& operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL& operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast(&rhs); return *this; } - QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT - { - queueFamilyIndex = queueFamilyIndex_; - return *this; - } - - QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT { - counterIndexCount = counterIndexCount_; + performanceCountersSampling = performanceCountersSampling_; return *this; } - QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT + operator VkQueryPoolPerformanceQueryCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT { - pCounterIndices = pCounterIndices_; - return *this; + return *reinterpret_cast( this ); } - operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator 
VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); - } - - operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueryPoolPerformanceCreateInfoKHR const& ) const = default; + auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const& ) const = default; #else - bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( counterIndexCount == rhs.counterIndexCount ) - && ( pCounterIndices == rhs.pCounterIndices ); + && ( performanceCountersSampling == rhs.performanceCountersSampling ); } - bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfoINTEL; const void* pNext = {}; - uint32_t queueFamilyIndex = {}; - uint32_t counterIndexCount = {}; - const uint32_t* pCounterIndices = {}; + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; }; - static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct QueueFamilyCheckpointPropertiesNV { @@ -61173,9 +64214,424 @@ namespace VULKAN_HPP_NAMESPACE static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
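// --- usage sketch (editorial, not part of the patch) ---
// The INTEL performance-query create info (renamed from QueryPoolCreateInfoINTEL
// in this release) still chains into a regular QueryPoolCreateInfo:
vk::QueryPoolPerformanceQueryCreateInfoINTEL intelCreateInfo;
intelCreateInfo.performanceCountersSampling = vk::QueryPoolSamplingModeINTEL::eManual;

vk::QueryPoolCreateInfo queryPoolCreateInfo;
queryPoolCreateInfo.pNext      = &intelCreateInfo;
queryPoolCreateInfo.queryType  = vk::QueryType::ePerformanceQueryINTEL;
queryPoolCreateInfo.queryCount = 1;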
); +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct RayTracingShaderGroupCreateInfoKHR + { + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = {}, + uint32_t closestHitShader_ = {}, + uint32_t anyHitShader_ = {}, + uint32_t intersectionShader_ = {}, + const void* pShaderGroupCaptureReplayHandle_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , generalShader( generalShader_ ) + , closestHitShader( closestHitShader_ ) + , anyHitShader( anyHitShader_ ) + , intersectionShader( intersectionShader_ ) + , pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , type( rhs.type ) + , generalShader( rhs.generalShader ) + , closestHitShader( rhs.closestHitShader ) + , anyHitShader( rhs.anyHitShader ) + , intersectionShader( rhs.intersectionShader ) + , pShaderGroupCaptureReplayHandle( rhs.pShaderGroupCaptureReplayHandle ) + {} + + RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( RayTracingShaderGroupCreateInfoKHR ) - offsetof( RayTracingShaderGroupCreateInfoKHR, pNext ) ); + return *this; + } + + RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + RayTracingShaderGroupCreateInfoKHR& operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle( const void* pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT + { + pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; + return *this; + } + + operator VkRayTracingShaderGroupCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RayTracingShaderGroupCreateInfoKHR const& ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( type == rhs.type ) + && ( generalShader 
== rhs.generalShader ) + && ( closestHitShader == rhs.closestHitShader ) + && ( anyHitShader == rhs.anyHitShader ) + && ( intersectionShader == rhs.intersectionShader ) + && ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); + } + + bool operator!=( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; + uint32_t intersectionShader = {}; + const void* pShaderGroupCaptureReplayHandle = {}; + }; + static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct RayTracingPipelineInterfaceCreateInfoKHR + { + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPayloadSize_ = {}, + uint32_t maxAttributeSize_ = {}, + uint32_t maxCallableSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxPayloadSize( maxPayloadSize_ ) + , maxAttributeSize( maxAttributeSize_ ) + , maxCallableSize( maxCallableSize_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , maxPayloadSize( rhs.maxPayloadSize ) + , maxAttributeSize( rhs.maxAttributeSize ) + , maxCallableSize( rhs.maxCallableSize ) + {} + + RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) - offsetof( RayTracingPipelineInterfaceCreateInfoKHR, pNext ) ); + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + RayTracingPipelineInterfaceCreateInfoKHR& operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & setMaxPayloadSize( uint32_t maxPayloadSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPayloadSize = maxPayloadSize_; + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & setMaxAttributeSize( uint32_t maxAttributeSize_ ) VULKAN_HPP_NOEXCEPT + { + maxAttributeSize = maxAttributeSize_; + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & setMaxCallableSize( uint32_t maxCallableSize_ ) VULKAN_HPP_NOEXCEPT + { + maxCallableSize = maxCallableSize_; + return *this; + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const& ) const = default; +#else + 
bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxPayloadSize == rhs.maxPayloadSize ) + && ( maxAttributeSize == rhs.maxAttributeSize ) + && ( maxCallableSize == rhs.maxCallableSize ); + } + + bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + const void* pNext = {}; + uint32_t maxPayloadSize = {}; + uint32_t maxAttributeSize = {}; + uint32_t maxCallableSize = {}; + }; + static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct RayTracingPipelineCreateInfoKHR + { + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , libraries( libraries_ ) + , pLibraryInterface( pLibraryInterface_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , stageCount( rhs.stageCount ) + , pStages( rhs.pStages ) + , groupCount( rhs.groupCount ) + , pGroups( rhs.pGroups ) + , maxRecursionDepth( rhs.maxRecursionDepth ) + , libraries( rhs.libraries ) + , pLibraryInterface( rhs.pLibraryInterface ) + , layout( rhs.layout ) + , basePipelineHandle( rhs.basePipelineHandle ) + , basePipelineIndex( rhs.basePipelineIndex ) + {} + + RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( RayTracingPipelineCreateInfoKHR ) - offsetof( RayTracingPipelineCreateInfoKHR, pNext ) ); + return *this; + } + + RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + RayTracingPipelineCreateInfoKHR& operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + 
+ RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxRecursionDepth = maxRecursionDepth_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ ) VULKAN_HPP_NOEXCEPT + { + libraries = libraries_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInterface = pLibraryInterface_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator VkRayTracingPipelineCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RayTracingPipelineCreateInfoKHR const& ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( stageCount == rhs.stageCount ) + && ( pStages == rhs.pStages ) + && ( groupCount == rhs.groupCount ) + && ( pGroups == rhs.pGroups ) + && ( maxRecursionDepth == rhs.maxRecursionDepth ) + && ( libraries == rhs.libraries ) + && ( pLibraryInterface == rhs.pLibraryInterface ) + && ( layout == rhs.layout ) + && ( basePipelineHandle == rhs.basePipelineHandle ) + && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups = {}; + uint32_t maxRecursionDepth = {}; + VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries = {}; + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface = {}; + 
    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+  };
+  static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
   struct RayTracingShaderGroupCreateInfoNV
   {
-    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral,
+    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral,
                                                             uint32_t generalShader_ = {},
                                                             uint32_t closestHitShader_ = {},
                                                             uint32_t anyHitShader_ = {},
@@ -61219,7 +64675,7 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
     {
       type = type_;
       return *this;
@@ -61282,7 +64738,7 @@ namespace VULKAN_HPP_NAMESPACE
   public:
     const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
     const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral;
+    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
     uint32_t generalShader = {};
     uint32_t closestHitShader = {};
     uint32_t anyHitShader = {};
@@ -64307,6 +67763,69 @@ namespace VULKAN_HPP_NAMESPACE
   static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
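As a rough usage sketch (not part of the patch), the RayTracingPipelineCreateInfoKHR wrapper added above can be filled in with its designated setters and handed to Device::createRayTracingPipelineKHR, which this patch adds further down. The stages, groups, pipelineLayout and device objects below are hypothetical, the default vk namespace is assumed, and VK_ENABLE_BETA_EXTENSIONS plus the provisional KHR ray tracing extension must be enabled for any of this to exist.

    // Hypothetical application-side sketch; none of these variables come from the patch itself.
    std::vector<vk::PipelineShaderStageCreateInfo>      stages;   // filled with raygen/miss/hit stages elsewhere
    std::vector<vk::RayTracingShaderGroupCreateInfoKHR> groups;   // one entry per shader group

    vk::RayTracingPipelineCreateInfoKHR createInfo = vk::RayTracingPipelineCreateInfoKHR()
        .setStageCount( static_cast<uint32_t>( stages.size() ) )
        .setPStages( stages.data() )
        .setGroupCount( static_cast<uint32_t>( groups.size() ) )
        .setPGroups( groups.data() )
        .setMaxRecursionDepth( 1 )
        .setLayout( pipelineLayout );

    vk::Pipeline rayTracingPipeline =
        device.createRayTracingPipelineKHR( vk::PipelineCache(), createInfo ).value;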
+  struct SetStateFlagsIndirectCommandNV
+  {
+    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT
+      : data( data_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : data( rhs.data )
+    {}
+
+    SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( SetStateFlagsIndirectCommandNV ) );
+      return *this;
+    }
+
+    SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SetStateFlagsIndirectCommandNV& operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>(&rhs);
+      return *this;
+    }
+
+    SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+
+    operator VkSetStateFlagsIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV*>( this );
+    }
+
+    operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SetStateFlagsIndirectCommandNV const& ) const = default;
+#else
+    bool operator==( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( data == rhs.data );
+    }
+
+    bool operator!=( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t data = {};
+  };
+  static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SetStateFlagsIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
   struct ShaderModuleCreateInfo
   {
     VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {},
@@ -65062,6 +68581,113 @@ namespace VULKAN_HPP_NAMESPACE
   static_assert( std::is_standard_layout<StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" );
 #endif /*VK_USE_PLATFORM_GGP*/
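Like every generated wrapper in this header, the SetStateFlagsIndirectCommandNV struct added above stays layout-compatible with its C counterpart (hence the static_asserts), so it converts freely in both directions. A minimal sketch, with all values hypothetical and the default vk namespace assumed:

    VkSetStateFlagsIndirectCommandNV raw{};                     // plain C struct from vulkan_core.h
    raw.data = 0x1;

    vk::SetStateFlagsIndirectCommandNV wrapped( raw );          // construct the wrapper from the C struct
    wrapped.setData( 0x2 );                                     // fluent setter defined above
    const VkSetStateFlagsIndirectCommandNV & asC = wrapped;     // converts back via the cast operator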
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct StridedBufferRegionKHR
+  {
+    VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+                                                 VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {},
+                                                 VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , offset( offset_ )
+      , stride( stride_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR( StridedBufferRegionKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : buffer( rhs.buffer )
+      , offset( rhs.offset )
+      , stride( rhs.stride )
+      , size( rhs.size )
+    {}
+
+    explicit StridedBufferRegionKHR( IndirectCommandsStreamNV const& indirectCommandsStreamNV,
+                                     VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {},
+                                     VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} )
+      : buffer( indirectCommandsStreamNV.buffer )
+      , offset( indirectCommandsStreamNV.offset )
+      , stride( stride_ )
+      , size( size_ )
+    {}
+
+    StridedBufferRegionKHR & operator=( StridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( StridedBufferRegionKHR ) );
+      return *this;
+    }
+
+    StridedBufferRegionKHR( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    StridedBufferRegionKHR& operator=( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR const *>(&rhs);
+      return *this;
+    }
+
+    StridedBufferRegionKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    StridedBufferRegionKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    StridedBufferRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    StridedBufferRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkStridedBufferRegionKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStridedBufferRegionKHR*>( this );
+    }
+
+    operator VkStridedBufferRegionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStridedBufferRegionKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( StridedBufferRegionKHR const& ) const = default;
+#else
+    bool operator==( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( stride == rhs.stride )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+  };
+  static_assert( sizeof( StridedBufferRegionKHR ) == sizeof( VkStridedBufferRegionKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<StridedBufferRegionKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
   struct SubmitInfo
   {
     VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {},
@@ -66694,6 +70320,100 @@ namespace VULKAN_HPP_NAMESPACE
   static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<TimelineSemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
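StridedBufferRegionKHR describes one shader-binding-table region for the CommandBuffer::traceRaysKHR and traceRaysIndirectKHR overloads that appear later in this patch. A hedged sketch of how an application might describe its four table regions; sbtBuffer, the offsets, handleStride, commandBuffer and extent are all hypothetical values an application would derive from its own setup and the device's ray-tracing properties:

    // All names below are assumptions for illustration only.
    vk::StridedBufferRegionKHR raygenTable( sbtBuffer, raygenOffset, handleStride, handleStride );
    vk::StridedBufferRegionKHR missTable( sbtBuffer, missOffset, handleStride, handleStride );
    vk::StridedBufferRegionKHR hitTable( sbtBuffer, hitOffset, handleStride, handleStride );
    vk::StridedBufferRegionKHR callableTable{};                  // left empty when no callable shaders are used

    commandBuffer.traceRaysKHR( raygenTable, missTable, hitTable, callableTable,
                                extent.width, extent.height, 1 );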
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct TraceRaysIndirectCommandKHR
+  {
+    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {},
+                                                      uint32_t height_ = {},
+                                                      uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
+      : width( width_ )
+      , height( height_ )
+      , depth( depth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : width( rhs.width )
+      , height( rhs.height )
+      , depth( rhs.depth )
+    {}
+
+    explicit TraceRaysIndirectCommandKHR( Extent2D const& extent2D,
+                                          uint32_t depth_ = {} )
+      : width( extent2D.width )
+      , height( extent2D.height )
+      , depth( depth_ )
+    {}
+
+    TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( TraceRaysIndirectCommandKHR ) );
+      return *this;
+    }
+
+    TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    TraceRaysIndirectCommandKHR& operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const *>(&rhs);
+      return *this;
+    }
+
+    TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+
+    operator VkTraceRaysIndirectCommandKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR*>( this );
+    }
+
+    operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTraceRaysIndirectCommandKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( TraceRaysIndirectCommandKHR const& ) const = default;
+#else
+    bool operator==( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( depth == rhs.depth );
+    }
+
+    bool operator!=( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t depth = {};
+  };
+  static_assert( sizeof( TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<TraceRaysIndirectCommandKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
   struct ValidationCacheCreateInfoEXT
   {
     VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {},
@@ -67700,69 +71420,69 @@ namespace VULKAN_HPP_NAMESPACE
   static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!"
); - struct WriteDescriptorSetAccelerationStructureNV + struct WriteDescriptorSetAccelerationStructureKHR { - VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = {}, - const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( uint32_t accelerationStructureCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT : accelerationStructureCount( accelerationStructureCount_ ) , pAccelerationStructures( pAccelerationStructures_ ) {} - VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( WriteDescriptorSetAccelerationStructureNV const& rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const& rhs ) VULKAN_HPP_NOEXCEPT : pNext( rhs.pNext ) , accelerationStructureCount( rhs.accelerationStructureCount ) , pAccelerationStructures( rhs.pAccelerationStructures ) {} - WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( WriteDescriptorSetAccelerationStructureNV ) - offsetof( WriteDescriptorSetAccelerationStructureNV, pNext ) ); + memcpy( &pNext, &rhs.pNext, sizeof( WriteDescriptorSetAccelerationStructureKHR ) - offsetof( WriteDescriptorSetAccelerationStructureKHR, pNext ) ); return *this; } - WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR& operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast(&rhs); return *this; } - WriteDescriptorSetAccelerationStructureNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureCount = accelerationStructureCount_; return *this; } - WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT { pAccelerationStructures = pAccelerationStructures_; return *this; } - operator VkWriteDescriptorSetAccelerationStructureNV const&() const VULKAN_HPP_NOEXCEPT + operator VkWriteDescriptorSetAccelerationStructureKHR const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - 
operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT + operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( WriteDescriptorSetAccelerationStructureNV const& ) const = default; + auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const& ) const = default; #else - bool operator==( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) @@ -67770,20 +71490,20 @@ namespace VULKAN_HPP_NAMESPACE && ( pAccelerationStructures == rhs.pAccelerationStructures ); } - bool operator!=( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR; const void* pNext = {}; uint32_t accelerationStructureCount = {}; - const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures = {}; }; - static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct WriteDescriptorSetInlineUniformBlockEXT { @@ -68382,6 +72102,20 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); @@ -68465,16 +72199,54 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( &info ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infoCount, reinterpret_cast( pInfos ), reinterpret_cast( ppOffsetInfos ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( ArrayProxy infos, ArrayProxy pOffsetInfos, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() ); +#else + if ( infos.size() != pOffsetInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" 
); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infos.size() , reinterpret_cast( infos.data() ), reinterpret_cast( pOffsetInfos.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -68517,19 +72289,49 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE 
void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); } #else template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( &info ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ template VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -68583,6 +72385,21 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void 
CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -69019,6 +72836,19 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( pGeneratedCommandsInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -69100,15 +72930,15 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast( pProcessCommandsInfo ) ); + d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast( &processCommandsInfo ) ); + d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, 
reinterpret_cast( &generatedCommandsInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -69152,19 +72982,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast( pReserveSpaceInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast( &reserveSpaceInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -69520,6 +73337,36 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast( pRaygenShaderBindingTable ), reinterpret_cast( pMissShaderBindingTable ), reinterpret_cast( pHitShaderBindingTable ), reinterpret_cast( pCallableShaderBindingTable ), static_cast( buffer ), static_cast( offset ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast( &raygenShaderBindingTable ), reinterpret_cast( &missShaderBindingTable ), reinterpret_cast( &hitShaderBindingTable ), reinterpret_cast( &callableShaderBindingTable ), static_cast( buffer ), static_cast( offset ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast( pRaygenShaderBindingTable ), reinterpret_cast( pMissShaderBindingTable ), reinterpret_cast( 
pHitShaderBindingTable ), reinterpret_cast( pCallableShaderBindingTable ), width, height, depth ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast( &raygenShaderBindingTable ), reinterpret_cast( &missShaderBindingTable ), reinterpret_cast( &hitShaderBindingTable ), reinterpret_cast( &callableShaderBindingTable ), width, height, depth ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -69561,15 +73408,28 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, 
VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -69858,15 +73718,29 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryKHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + return static_cast( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d ) const + VULKAN_HPP_INLINE typename ResultValueType::type Device::bindAccelerationStructureMemoryKHR( ArrayProxy bindInfos, Dispatch const &d ) const { - Result result = static_cast( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); + Result result = static_cast( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfoCount, 
reinterpret_cast( pBindInfos ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d ) const + { + Result result = static_cast( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -69957,6 +73831,30 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBuildAccelerationStructureKHR( m_device, infoCount, reinterpret_cast( pInfos ), reinterpret_cast( ppOffsetInfos ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::buildAccelerationStructureKHR( ArrayProxy infos, ArrayProxy pOffsetInfos, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() ); +#else + if ( infos.size() != pOffsetInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + Result result = static_cast( d.vkBuildAccelerationStructureKHR( m_device, infos.size() , reinterpret_cast( infos.data() ), reinterpret_cast( pOffsetInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::buildAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -69972,6 +73870,82 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCopyAccelerationStructureKHR( m_device, reinterpret_cast( pInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d ) const + { + Result result = static_cast( d.vkCopyAccelerationStructureKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCopyAccelerationStructureToMemoryKHR( m_device, reinterpret_cast( pInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d ) const + { + Result result = static_cast( d.vkCopyAccelerationStructureToMemoryKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyAccelerationStructureToMemoryKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCopyMemoryToAccelerationStructureKHR( m_device, reinterpret_cast( pInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d ) const + { + Result result = static_cast( d.vkCopyMemoryToAccelerationStructureKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyMemoryToAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pAccelerationStructure ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &accelerationStructure ) ) ); + return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &accelerationStructure ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, 
accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -70154,6 +74128,34 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast( pAllocator ), reinterpret_cast( pDeferredOperation ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createDeferredOperationKHR( Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + Result result = static_cast( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &deferredOperation ) ) ); + return createResultValue( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING"::Device::createDeferredOperationKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDeferredOperationKHRUnique( Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + Result result = static_cast( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &deferredOperation ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING"::Device::createDeferredOperationKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -70467,53 +74469,27 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( 
pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pIndirectCommandsLayout ) ) ); + return static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pIndirectCommandsLayout ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_INLINE typename ResultValueType::type Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const { - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout; - Result result = static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); - return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" ); + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); + return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNV" ); } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const { - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout; - Result result = static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVXUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pObjectTable ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const - { - 
VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable; - Result result = static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &objectTable ) ) ); - return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable; - Result result = static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &objectTable ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVXUnique", deleter ); + return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVUnique", deleter ); } #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -70596,6 +74572,86 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } + template + VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size(), vectorAllocator ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } + template + 
VULKAN_HPP_INLINE ResultValue Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" ); + std::vector, Allocator> pipelines; + pipelines.reserve( createInfos.size() ); + Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); + Result result = static_cast(d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ) + { + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i( buffer[i], deleter ) ); + } + } + + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } + template + VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" 
); + std::vector, Allocator> pipelines( vectorAllocator ); + pipelines.reserve( createInfos.size() ); + Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); + Result result = static_cast(d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ) + { + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i( buffer[i], deleter ) ); + } + } + + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } + template + VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineKHRUnique", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR }, deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -71040,29 +75096,59 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + return static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + } +#else + template + VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const + { + Result result = static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::deferredOperationJoinKHR", { Result::eSuccess, 
Result::eThreadDoneKHR, Result::eThreadIdleKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -71144,6 +75230,34 @@ 
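[Editor's note, not part of the patch: a minimal usage sketch for the new beta createRayTracingPipelineKHR wrapper shown above, assuming a device with the provisional VK_KHR_ray_tracing extension enabled, a fully populated vk::RayTracingPipelineCreateInfoKHR, and VK_ENABLE_BETA_EXTENSIONS defined at compile time.]

#ifdef VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>
#include <cassert>

// Sketch only: 'createInfo' (stages, groups, layout, ...) is assumed to be filled in elsewhere.
vk::Pipeline createRayTracingPipelineSketch( vk::Device device,
                                             vk::PipelineCache cache,
                                             vk::RayTracingPipelineCreateInfoKHR const & createInfo )
{
  // The KHR wrapper returns ResultValue<Pipeline>: creation may complete immediately or be
  // deferred; eOperationDeferredKHR / eOperationNotDeferredKHR are additional success codes.
  vk::ResultValue<vk::Pipeline> rv = device.createRayTracingPipelineKHR( cache, createInfo );
  assert( rv.result == vk::Result::eSuccess ||
          rv.result == vk::Result::eOperationDeferredKHR ||
          rv.result == vk::Result::eOperationNotDeferredKHR );
  return rv.value;
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/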
namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -71379,54 +75493,28 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -71852,20 +75940,60 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS template - VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), dataSize, pData ) ); + return static_cast( 
d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( pInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy data, Dispatch const &d ) const + VULKAN_HPP_INLINE typename ResultValueType::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy data, Dispatch const &d ) const { - Result result = static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + Result result = static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const 
VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -72063,6 +76191,39 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast( operation ) ); + } +#else + template + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast( operation ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + } +#else + template + VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getDeferredOperationResultKHR", { Result::eSuccess, Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -72109,6 +76270,22 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast( version ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast( &version ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureCompatibilityKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -72333,6 +76510,29 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ template + 
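[Editor's note, not part of the patch: a sketch of driving a deferred host operation using the deferredOperationJoinKHR and getDeferredOperationResultKHR wrappers above; 'op' is assumed to have been returned by a deferrable KHR command, with VK_ENABLE_BETA_EXTENSIONS defined.]

#ifdef VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>

vk::Result waitForDeferredOperationSketch( vk::Device device, vk::DeferredOperationKHR op )
{
  // Join until this thread either completes the operation (eSuccess) or has nothing
  // left to contribute (eThreadDoneKHR); eThreadIdleKHR means "try joining again".
  vk::Result join = device.deferredOperationJoinKHR( op );
  while ( join == vk::Result::eThreadIdleKHR )
  {
    join = device.deferredOperationJoinKHR( op );
  }
  // Returns eNotReady while other threads are still working; otherwise the operation's result.
  return device.getDeferredOperationResultKHR( op );
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/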
VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast( image ), reinterpret_cast( pProperties ) ) ); @@ -72922,16 +77122,46 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename 
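[Editor's note, not part of the patch: an illustrative use of the new getGeneratedCommandsMemoryRequirementsNV wrapper from VK_NV_device_generated_commands, which replaces the removed NVX object-table path; 'pipeline', 'layout' and 'maxSequences' are assumed to come from the application.]

#include <vulkan/vulkan.hpp>

vk::MemoryRequirements2 queryGeneratedCommandsMemorySketch( vk::Device device,
                                                            vk::Pipeline pipeline,
                                                            vk::IndirectCommandsLayoutNV layout,
                                                            uint32_t maxSequences )
{
  vk::GeneratedCommandsMemoryRequirementsInfoNV info;
  info.pipelineBindPoint      = vk::PipelineBindPoint::eGraphics;
  info.pipeline               = pipeline;   // pipeline the generated commands will execute with
  info.indirectCommandsLayout = layout;     // created via createIndirectCommandsLayoutNV
  info.maxSequencesCount      = maxSequences;
  // The enhanced-mode overload added in this patch returns the requirements by value.
  return device.getGeneratedCommandsMemoryRequirementsNV( info );
}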
ResultValueType::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d ) const { - Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -73397,28 +77627,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), objectCount, reinterpret_cast( ppObjectTableEntries ), pObjectIndices ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() ); -#else - if ( pObjectTableEntries.size() != objectIndices.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - Result result = static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), pObjectTableEntries.size() , reinterpret_cast( pObjectTableEntries.data() ), objectIndices.data() ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::registerObjectsNVX" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VK_USE_PLATFORM_WIN32_KHR #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template @@ -73714,28 
+77922,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkUnregisterObjectsNVX( m_device, static_cast( objectTable ), objectCount, reinterpret_cast( pObjectEntryTypes ), pObjectIndices ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy objectEntryTypes, ArrayProxy objectIndices, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() ); -#else - if ( objectEntryTypes.size() != objectIndices.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - Result result = static_cast( d.vkUnregisterObjectsNVX( m_device, static_cast( objectTable ), objectEntryTypes.size() , reinterpret_cast( objectEntryTypes.data() ), objectIndices.data() ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::unregisterObjectsNVX" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -73819,6 +78005,22 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), dataSize, pData, stride ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::writeAccelerationStructuresPropertiesKHR( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy data, size_t stride, Dispatch const &d ) const + { + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ), stride ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::writeAccelerationStructuresPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_USE_PLATFORM_ANDROID_KHR template VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const 
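[Editor's note, not part of the patch: a sketch of the host-side writeAccelerationStructuresPropertiesKHR wrapper added above, assuming a built acceleration structure, an implementation supporting host acceleration-structure commands, and the beta eAccelerationStructureCompactedSizeKHR query type.]

#ifdef VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>
#include <array>

vk::DeviceSize queryCompactedSizeSketch( vk::Device device, vk::AccelerationStructureKHR as )
{
  std::array<vk::DeviceSize, 1> sizes{};
  // Host query; assumes the acceleration structure was built with the allow-compaction flag.
  device.writeAccelerationStructuresPropertiesKHR(
      as,
      vk::QueryType::eAccelerationStructureCompactedSizeKHR,
      vk::ArrayProxy<vk::DeviceSize>( sizes ),
      sizeof( vk::DeviceSize ) );
  return sizes[0];
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/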
&d) const VULKAN_HPP_NOEXCEPT @@ -75465,21 +79667,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast( pFeatures ), reinterpret_cast( pLimits ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX limits; - d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast( &features ), reinterpret_cast( &limits ) ); - return limits; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), reinterpret_cast( pImageFormatProperties ) ) ); @@ -76627,10 +80814,18 @@ namespace VULKAN_HPP_NAMESPACE template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; @@ -76669,6 +80864,7 @@ namespace VULKAN_HPP_NAMESPACE template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; @@ -76732,6 +80928,11 @@ namespace VULKAN_HPP_NAMESPACE template <> 
struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; @@ -76777,6 +80978,8 @@ namespace VULKAN_HPP_NAMESPACE template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; @@ -76784,6 +80987,13 @@ namespace VULKAN_HPP_NAMESPACE template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; @@ -76860,6 +81070,9 @@ namespace VULKAN_HPP_NAMESPACE template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; @@ -76882,8 +81095,8 @@ namespace VULKAN_HPP_NAMESPACE template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; @@ -76927,7 +81140,7 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR template <> struct 
isStructureChainValid{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; template <> struct isStructureChainValid{ enum { value = true }; }; #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL @@ -77033,19 +81246,35 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; PFN_vkCmdBlitImage vkCmdBlitImage = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdBuildAccelerationStructureIndirectKHR vkCmdBuildAccelerationStructureIndirectKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdBuildAccelerationStructureKHR vkCmdBuildAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; PFN_vkCmdCopyImage vkCmdCopyImage = 0; PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; @@ -77077,17 +81306,17 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; PFN_vkCmdPushConstants vkCmdPushConstants = 0; PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX = 0; PFN_vkCmdResetEvent vkCmdResetEvent = 0; PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; PFN_vkCmdResolveImage vkCmdResolveImage = 0; @@ -77114,9 +81343,16 @@ namespace 
VULKAN_HPP_NAMESPACE PFN_vkCmdSetViewport vkCmdSetViewport = 0; PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; @@ -77132,6 +81368,7 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkBindAccelerationStructureMemoryKHR vkBindAccelerationStructureMemoryKHR = 0; PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; PFN_vkBindBufferMemory vkBindBufferMemory = 0; PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; @@ -77139,12 +81376,30 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkBindImageMemory vkBindImageMemory = 0; PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkBuildAccelerationStructureKHR vkBuildAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; PFN_vkCreateBuffer vkCreateBuffer = 0; PFN_vkCreateBufferView vkCreateBufferView = 0; PFN_vkCreateCommandPool vkCreateCommandPool = 0; PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; @@ -77155,11 +81410,13 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; PFN_vkCreateImage vkCreateImage = 0; PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX = 0; - PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; PFN_vkCreateQueryPool 
vkCreateQueryPool = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; PFN_vkCreateRenderPass vkCreateRenderPass = 0; PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; @@ -77174,10 +81431,17 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; PFN_vkDestroyBuffer vkDestroyBuffer = 0; PFN_vkDestroyBufferView vkDestroyBufferView = 0; PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; @@ -77188,8 +81452,7 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; PFN_vkDestroyImage vkDestroyImage = 0; PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX = 0; - PFN_vkDestroyObjectTableNVX vkDestroyObjectTableNVX = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; PFN_vkDestroyPipeline vkDestroyPipeline = 0; PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; @@ -77208,7 +81471,13 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; PFN_vkFreeMemory vkFreeMemory = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetAccelerationStructureMemoryRequirementsKHR vkGetAccelerationStructureMemoryRequirementsKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; #ifdef VK_USE_PLATFORM_ANDROID_KHR PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; @@ -77222,8 +81491,17 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; +#ifdef 
VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; @@ -77243,6 +81521,7 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; @@ -77274,6 +81553,10 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; @@ -77303,7 +81586,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; - PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX = 0; #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -77326,13 +81608,15 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX = 0; PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; PFN_vkWaitForFences vkWaitForFences = 0; PFN_vkWaitSemaphores vkWaitSemaphores = 0; PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ @@ -77412,7 +81696,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = 0; PFN_vkGetPhysicalDeviceImageFormatProperties 
vkGetPhysicalDeviceImageFormatProperties = 0; PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; @@ -77609,7 +81892,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX" ) ); vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); @@ -77668,19 +81950,35 @@ namespace VULKAN_HPP_NAMESPACE vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureIndirectKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + 
vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); @@ -77712,17 +82010,17 @@ namespace VULKAN_HPP_NAMESPACE vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); - vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdProcessCommandsNVX" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - vkCmdReserveSpaceForCommandsNVX = 
PFN_vkCmdReserveSpaceForCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdReserveSpaceForCommandsNVX" ) ); vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); @@ -77749,9 +82047,16 @@ namespace VULKAN_HPP_NAMESPACE vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); @@ -77767,6 +82072,7 @@ namespace VULKAN_HPP_NAMESPACE vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); + vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryKHR" ) ); vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); @@ -77774,12 +82080,30 @@ namespace VULKAN_HPP_NAMESPACE vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetInstanceProcAddr( 
instance, "vkBuildAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); @@ -77790,11 +82114,13 @@ namespace VULKAN_HPP_NAMESPACE vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); - vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNVX" ) ); - vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( vkGetInstanceProcAddr( instance, "vkCreateObjectTableNVX" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, 
"vkCreateRayTracingPipelinesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); @@ -77809,10 +82135,17 @@ namespace VULKAN_HPP_NAMESPACE vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); @@ -77823,8 +82156,7 @@ namespace VULKAN_HPP_NAMESPACE vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); - vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNVX" ) ); - vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( vkGetInstanceProcAddr( instance, "vkDestroyObjectTableNVX" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); @@ -77843,7 +82175,13 @@ namespace VULKAN_HPP_NAMESPACE vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( 
instance, "vkFreeCommandBuffers" ) ); vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); #ifdef VK_USE_PLATFORM_ANDROID_KHR vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); @@ -77857,8 +82195,17 @@ namespace VULKAN_HPP_NAMESPACE vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); @@ -77878,6 +82225,7 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + 
vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); @@ -77909,6 +82257,10 @@ namespace VULKAN_HPP_NAMESPACE vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); @@ -77938,7 +82290,6 @@ namespace VULKAN_HPP_NAMESPACE vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); - vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( vkGetInstanceProcAddr( instance, "vkRegisterObjectsNVX" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -77961,13 +82312,15 @@ namespace VULKAN_HPP_NAMESPACE vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); - vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( vkGetInstanceProcAddr( instance, "vkUnregisterObjectsNVX" ) ); vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) 
); vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); @@ -77994,19 +82347,35 @@ namespace VULKAN_HPP_NAMESPACE vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureIndirectKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); 
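
Most of the new KHR ray-tracing commands loaded in these hunks sit behind VK_ENABLE_BETA_EXTENSIONS; only the entries that alias existing NV functions are loaded unconditionally. A minimal sketch of how an application might opt into the beta declarations and resolve one of the guarded commands itself follows; the function and variable names are purely illustrative, and the pointer stays null unless the device enables the provisional ray-tracing extension.

#define VK_ENABLE_BETA_EXTENSIONS   // must be defined before any Vulkan header is included
#include <vulkan/vulkan.h>

#ifdef VK_ENABLE_BETA_EXTENSIONS
// Illustrative cache for one beta entry point; nullptr means it is unavailable.
static PFN_vkCmdCopyAccelerationStructureKHR pfnCmdCopyAccelerationStructureKHR = nullptr;

static void loadBetaAccelerationStructureCopy( VkDevice device )
{
    pfnCmdCopyAccelerationStructureKHR =
        reinterpret_cast<PFN_vkCmdCopyAccelerationStructureKHR>(
            vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
}
#endif
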
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); @@ -78038,17 +82407,17 @@ namespace VULKAN_HPP_NAMESPACE vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); - vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdProcessCommandsNVX" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdReserveSpaceForCommandsNVX" ) ); vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); @@ -78075,9 +82444,16 @@ namespace VULKAN_HPP_NAMESPACE vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, 
"vkCmdSetViewportShadingRatePaletteNV" ) ); vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); @@ -78093,6 +82469,7 @@ namespace VULKAN_HPP_NAMESPACE vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryKHR" ) ); vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); @@ -78100,12 +82477,30 @@ namespace VULKAN_HPP_NAMESPACE vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( 
vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); @@ -78116,11 +82511,13 @@ namespace VULKAN_HPP_NAMESPACE vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); - vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNVX" ) ); - vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( vkGetDeviceProcAddr( device, "vkCreateObjectTableNVX" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); @@ -78135,10 +82532,17 @@ namespace VULKAN_HPP_NAMESPACE vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); +#ifdef 
VK_ENABLE_BETA_EXTENSIONS + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); @@ -78149,8 +82553,7 @@ namespace VULKAN_HPP_NAMESPACE vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); - vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNVX" ) ); - vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( vkGetDeviceProcAddr( device, "vkDestroyObjectTableNVX" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); @@ -78169,7 +82572,13 @@ namespace VULKAN_HPP_NAMESPACE vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetAccelerationStructureMemoryRequirementsNV = 
PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); #ifdef VK_USE_PLATFORM_ANDROID_KHR vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); @@ -78183,8 +82592,17 @@ namespace VULKAN_HPP_NAMESPACE vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); @@ -78204,6 +82622,7 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); @@ -78235,6 +82654,10 @@ namespace VULKAN_HPP_NAMESPACE vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( 
vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); @@ -78264,7 +82687,6 @@ namespace VULKAN_HPP_NAMESPACE vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); - vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( vkGetDeviceProcAddr( device, "vkRegisterObjectsNVX" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -78287,13 +82709,15 @@ namespace VULKAN_HPP_NAMESPACE vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); - vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( vkGetDeviceProcAddr( device, "vkUnregisterObjectsNVX" ) ); vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h index 2fdffa6..94c781e 100644 
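
The vulkan_core.h changes that follow start by bumping VK_HEADER_VERSION from 134 to 135. A project that relies on symbols introduced in this revision (for instance the KHR acceleration-structure structure types) can guard against stale installed headers with a compile-time check such as the sketch below; the patch itself does not require this, it is just a suggested safeguard.

#include <vulkan/vulkan_core.h>

// Fail the build early if the installed Vulkan headers predate this update.
static_assert( VK_HEADER_VERSION >= 135, "Vulkan headers 1.2.135 or newer are required" );
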
--- a/include/vulkan/vulkan_core.h
+++ b/include/vulkan/vulkan_core.h
@@ -44,7 +44,7 @@ extern "C" {
 #define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff)
 #define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff)
 // Version of this file
-#define VK_HEADER_VERSION 134
+#define VK_HEADER_VERSION 135
 // Complete version of this file
 #define VK_HEADER_VERSION_COMPLETE VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION)
@@ -148,9 +148,15 @@ typedef enum VkResult {
     VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001,
     VK_ERROR_VALIDATION_FAILED_EXT = -1000011001,
     VK_ERROR_INVALID_SHADER_NV = -1000012000,
+    VK_ERROR_INCOMPATIBLE_VERSION_KHR = -1000150000,
     VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000,
     VK_ERROR_NOT_PERMITTED_EXT = -1000174001,
     VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000,
+    VK_THREAD_IDLE_KHR = 1000268000,
+    VK_THREAD_DONE_KHR = 1000268001,
+    VK_OPERATION_DEFERRED_KHR = 1000268002,
+    VK_OPERATION_NOT_DEFERRED_KHR = 1000268003,
+    VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = 1000297000,
     VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY,
     VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE,
     VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION,
@@ -387,12 +393,6 @@ typedef enum VkStructureType {
     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001,
     VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002,
     VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000,
-    VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000,
-    VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001,
-    VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002,
-    VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003,
-    VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004,
-    VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005,
     VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000,
     VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000,
     VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000,
@@ -456,6 +456,27 @@ typedef enum VkStructureType {
     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001,
     VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002,
     VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000,
+    VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR = 1000165006,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000165007,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR = 1000150001,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_KHR = 1000150007,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR = 1000150008,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR = 1000150009,
+    VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010,
+    VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011,
+
VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR = 1000150013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR = 1000150014, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 1000150016, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018, VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001, @@ -476,8 +497,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003, VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004, VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005, - VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006, - VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009, VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011, @@ -554,6 +573,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000, + VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR = 1000268000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000, VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002, @@ -561,10 +581,22 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = 1000276000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000, + VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV = 1000277003, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV = 1000277004, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = 1000281001, VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000, VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001, + VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = 1000297000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000, + VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, @@ -648,6 +680,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, @@ -1024,8 +1058,10 @@ typedef enum VkQueryType { VK_QUERY_TYPE_TIMESTAMP = 2, VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004, VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000, - VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000165000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150000, VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION, VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP, VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1), @@ -1367,7 +1403,8 @@ typedef enum VkDescriptorType { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = 1000138000, - VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000165000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1), @@ -1396,7 +1433,8 @@ typedef enum VkAttachmentStoreOp { typedef enum VkPipelineBindPoint { VK_PIPELINE_BIND_POINT_GRAPHICS = 0, VK_PIPELINE_BIND_POINT_COMPUTE = 1, - VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = 1000165000, + VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000, + VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS, VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE, VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1), @@ -1415,8 +1453,9 @@ typedef enum VkCommandBufferLevel { typedef enum VkIndexType { 
VK_INDEX_TYPE_UINT16 = 0, VK_INDEX_TYPE_UINT32 = 1, - VK_INDEX_TYPE_NONE_NV = 1000165000, + VK_INDEX_TYPE_NONE_KHR = 1000165000, VK_INDEX_TYPE_UINT8_EXT = 1000265000, + VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR, VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32, VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1), @@ -1466,14 +1505,15 @@ typedef enum VkObjectType { VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, - VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000, - VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001, VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000165000, VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, - VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000, + VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN, VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL, VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1), @@ -1517,6 +1557,7 @@ typedef enum VkFormatFeatureFlagBits { VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000, + VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000, VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, @@ -1644,13 +1685,15 @@ typedef enum VkPipelineStageFlagBits { VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000, VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000, - VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000, VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00400000, - VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = 0x00200000, - VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = 0x00080000, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000, VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineStageFlagBits; typedef VkFlags VkPipelineStageFlags; @@ -1749,7 +1792,8 @@ typedef enum VkBufferUsageFlagBits { VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, - VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = 
0x00000400, + VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR = 0x00000400, + VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -1767,6 +1811,11 @@ typedef enum VkShaderModuleCreateFlagBits { VK_SHADER_MODULE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkShaderModuleCreateFlagBits; typedef VkFlags VkShaderModuleCreateFlags; + +typedef enum VkPipelineCacheCreateFlagBits { + VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = 0x00000001, + VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheCreateFlagBits; typedef VkFlags VkPipelineCacheCreateFlags; typedef enum VkPipelineCreateFlagBits { @@ -1775,9 +1824,19 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008, VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, + VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, + VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = 0x00000100, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = 0x00000200, VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, @@ -1801,14 +1860,20 @@ typedef enum VkShaderStageFlagBits { VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, VK_SHADER_STAGE_ALL = 0x7FFFFFFF, - VK_SHADER_STAGE_RAYGEN_BIT_NV = 0x00000100, - VK_SHADER_STAGE_ANY_HIT_BIT_NV = 0x00000200, - VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = 0x00000400, - VK_SHADER_STAGE_MISS_BIT_NV = 0x00000800, - VK_SHADER_STAGE_INTERSECTION_BIT_NV = 0x00001000, - VK_SHADER_STAGE_CALLABLE_BIT_NV = 0x00002000, + VK_SHADER_STAGE_RAYGEN_BIT_KHR = 0x00000100, + VK_SHADER_STAGE_ANY_HIT_BIT_KHR = 0x00000200, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR = 0x00000400, + VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800, + VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000, + VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000, VK_SHADER_STAGE_TASK_BIT_NV = 0x00000040, VK_SHADER_STAGE_MESH_BIT_NV = 0x00000080, + VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR, + VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR, VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 
0x7FFFFFFF } VkShaderStageFlagBits; typedef VkFlags VkPipelineVertexInputStateCreateFlags; @@ -1913,13 +1978,15 @@ typedef enum VkAccessFlagBits { VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000, VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000, VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000, - VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000, - VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000, VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000, VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000, - VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000, - VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000, VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000, + VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000, + VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkAccessFlagBits; typedef VkFlags VkAccessFlags; @@ -7436,16 +7503,15 @@ typedef enum VkDebugReportObjectTypeEXT { VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, - VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31, - VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, - VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000165000, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1), @@ -8023,229 +8089,6 @@ VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( #endif -#define VK_NVX_device_generated_commands 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) -#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 -#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands" - -typedef enum VkIndirectCommandsTokenTypeNVX { - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2, - 
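/*
 * Illustrative sketch, not part of the generated header: the renamed KHR
 * pipeline-stage and access bits above keep the same values as the old NV
 * spellings, so a barrier ordering an acceleration structure build against
 * ray tracing shader reads can be written with either name. Assumes `cmd` is
 * a command buffer in the recording state.
 */
#include <vulkan/vulkan.h>

static void barrier_between_as_build_and_trace(VkCommandBuffer cmd)
{
    VkMemoryBarrier barrier = {
        .sType         = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
        .srcAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, /* same value as ..._BIT_NV */
        .dstAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
    };
    vkCmdPipelineBarrier(cmd,
                         VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
                         VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
                         0,           /* dependencyFlags */
                         1, &barrier, /* memory barriers */
                         0, NULL,     /* buffer memory barriers */
                         0, NULL);    /* image memory barriers */
}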
VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1), - VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsTokenTypeNVX; - -typedef enum VkObjectEntryTypeNVX { - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0, - VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1, - VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2, - VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3, - VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4, - VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, - VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, - VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1), - VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryTypeNVX; - -typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX { - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsLayoutUsageFlagBitsNVX; -typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; - -typedef enum VkObjectEntryUsageFlagBitsNVX { - VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001, - VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002, - VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryUsageFlagBitsNVX; -typedef VkFlags VkObjectEntryUsageFlagsNVX; -typedef struct VkDeviceGeneratedCommandsFeaturesNVX { - VkStructureType sType; - const void* pNext; - VkBool32 computeBindingPointSupport; -} VkDeviceGeneratedCommandsFeaturesNVX; - -typedef struct VkDeviceGeneratedCommandsLimitsNVX { - VkStructureType sType; - const void* pNext; - uint32_t maxIndirectCommandsLayoutTokenCount; - uint32_t maxObjectEntryCounts; - uint32_t minSequenceCountBufferOffsetAlignment; - uint32_t minSequenceIndexBufferOffsetAlignment; - uint32_t minCommandsTokenBufferOffsetAlignment; -} VkDeviceGeneratedCommandsLimitsNVX; - -typedef struct VkIndirectCommandsTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - VkBuffer buffer; - VkDeviceSize offset; -} VkIndirectCommandsTokenNVX; - -typedef struct VkIndirectCommandsLayoutTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - uint32_t bindingUnit; - uint32_t dynamicCount; - uint32_t divisor; -} VkIndirectCommandsLayoutTokenNVX; - -typedef struct VkIndirectCommandsLayoutCreateInfoNVX { - VkStructureType sType; - const void* pNext; - VkPipelineBindPoint pipelineBindPoint; - VkIndirectCommandsLayoutUsageFlagsNVX flags; - uint32_t tokenCount; - const VkIndirectCommandsLayoutTokenNVX* pTokens; -} VkIndirectCommandsLayoutCreateInfoNVX; - -typedef struct VkCmdProcessCommandsInfoNVX { - VkStructureType sType; - const void* pNext; - VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX 
indirectCommandsLayout; - uint32_t indirectCommandsTokenCount; - const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens; - uint32_t maxSequencesCount; - VkCommandBuffer targetCommandBuffer; - VkBuffer sequencesCountBuffer; - VkDeviceSize sequencesCountOffset; - VkBuffer sequencesIndexBuffer; - VkDeviceSize sequencesIndexOffset; -} VkCmdProcessCommandsInfoNVX; - -typedef struct VkCmdReserveSpaceForCommandsInfoNVX { - VkStructureType sType; - const void* pNext; - VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t maxSequencesCount; -} VkCmdReserveSpaceForCommandsInfoNVX; - -typedef struct VkObjectTableCreateInfoNVX { - VkStructureType sType; - const void* pNext; - uint32_t objectCount; - const VkObjectEntryTypeNVX* pObjectEntryTypes; - const uint32_t* pObjectEntryCounts; - const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; - uint32_t maxUniformBuffersPerDescriptor; - uint32_t maxStorageBuffersPerDescriptor; - uint32_t maxStorageImagesPerDescriptor; - uint32_t maxSampledImagesPerDescriptor; - uint32_t maxPipelineLayouts; -} VkObjectTableCreateInfoNVX; - -typedef struct VkObjectTableEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; -} VkObjectTableEntryNVX; - -typedef struct VkObjectTablePipelineEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipeline pipeline; -} VkObjectTablePipelineEntryNVX; - -typedef struct VkObjectTableDescriptorSetEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkDescriptorSet descriptorSet; -} VkObjectTableDescriptorSetEntryNVX; - -typedef struct VkObjectTableVertexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; -} VkObjectTableVertexBufferEntryNVX; - -typedef struct VkObjectTableIndexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; - VkIndexType indexType; -} VkObjectTableIndexBufferEntryNVX; - -typedef struct VkObjectTablePushConstantEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkShaderStageFlags stageFlags; -} VkObjectTablePushConstantEntryNVX; - -typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); -typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); -typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable); -typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices); -typedef VkResult 
(VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX( - VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX( - VkDevice device, - VkIndirectCommandsLayoutNVX indirectCommandsLayout, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX( - VkDevice device, - const VkObjectTableCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkObjectTableNVX* pObjectTable); - -VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX( - VkDevice device, - VkObjectTableNVX objectTable, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectTableEntryNVX* const* ppObjectTableEntries, - const uint32_t* pObjectIndices); - -VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectEntryTypeNVX* pObjectEntryTypes, - const uint32_t* pObjectIndices); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( - VkPhysicalDevice physicalDevice, - VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, - VkDeviceGeneratedCommandsLimitsNVX* pLimits); -#endif - - #define VK_NV_clip_space_w_scaling 1 #define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 #define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" @@ -9290,91 +9133,142 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( #define VK_NV_ray_tracing 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) +typedef VkAccelerationStructureKHR VkAccelerationStructureNV; + #define VK_NV_RAY_TRACING_SPEC_VERSION 3 #define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" -#define VK_SHADER_UNUSED_NV (~0U) - -typedef enum VkAccelerationStructureTypeNV { - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = 0, - VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = 1, - VK_ACCELERATION_STRUCTURE_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, - VK_ACCELERATION_STRUCTURE_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV, - VK_ACCELERATION_STRUCTURE_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV + 1), - VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkAccelerationStructureTypeNV; - -typedef enum VkRayTracingShaderGroupTypeNV { - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = 0, - VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = 1, - 
VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = 2, - VK_RAY_TRACING_SHADER_GROUP_TYPE_BEGIN_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV, - VK_RAY_TRACING_SHADER_GROUP_TYPE_END_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV, - VK_RAY_TRACING_SHADER_GROUP_TYPE_RANGE_SIZE_NV = (VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV + 1), - VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkRayTracingShaderGroupTypeNV; - -typedef enum VkGeometryTypeNV { - VK_GEOMETRY_TYPE_TRIANGLES_NV = 0, - VK_GEOMETRY_TYPE_AABBS_NV = 1, - VK_GEOMETRY_TYPE_BEGIN_RANGE_NV = VK_GEOMETRY_TYPE_TRIANGLES_NV, - VK_GEOMETRY_TYPE_END_RANGE_NV = VK_GEOMETRY_TYPE_AABBS_NV, - VK_GEOMETRY_TYPE_RANGE_SIZE_NV = (VK_GEOMETRY_TYPE_AABBS_NV - VK_GEOMETRY_TYPE_TRIANGLES_NV + 1), - VK_GEOMETRY_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkGeometryTypeNV; - -typedef enum VkCopyAccelerationStructureModeNV { - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = 0, - VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = 1, - VK_COPY_ACCELERATION_STRUCTURE_MODE_BEGIN_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, - VK_COPY_ACCELERATION_STRUCTURE_MODE_END_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV, - VK_COPY_ACCELERATION_STRUCTURE_MODE_RANGE_SIZE_NV = (VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV + 1), - VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCopyAccelerationStructureModeNV; - -typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV + 1), - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkAccelerationStructureMemoryRequirementsTypeNV; - -typedef enum VkGeometryFlagBitsNV { - VK_GEOMETRY_OPAQUE_BIT_NV = 0x00000001, - VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = 0x00000002, - VK_GEOMETRY_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkGeometryFlagBitsNV; -typedef VkFlags VkGeometryFlagsNV; - -typedef enum VkGeometryInstanceFlagBitsNV { - VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = 0x00000001, - VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = 0x00000002, - VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = 0x00000004, - VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = 0x00000008, - VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkGeometryInstanceFlagBitsNV; -typedef VkFlags VkGeometryInstanceFlagsNV; - -typedef enum VkBuildAccelerationStructureFlagBitsNV { - VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = 0x00000001, - VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = 0x00000002, - VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = 0x00000004, - VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = 0x00000008, - VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = 0x00000010, - 
VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkBuildAccelerationStructureFlagBitsNV; -typedef VkFlags VkBuildAccelerationStructureFlagsNV; +#define VK_SHADER_UNUSED_KHR (~0U) +#define VK_SHADER_UNUSED_NV VK_SHADER_UNUSED_KHR + +typedef enum VkRayTracingShaderGroupTypeKHR { + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2, + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_BEGIN_RANGE_KHR = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_END_RANGE_KHR = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_RANGE_SIZE_KHR = (VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR + 1), + VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkRayTracingShaderGroupTypeKHR; +typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV; + + +typedef enum VkGeometryTypeKHR { + VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0, + VK_GEOMETRY_TYPE_AABBS_KHR = 1, + VK_GEOMETRY_TYPE_INSTANCES_KHR = 1000150000, + VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR, + VK_GEOMETRY_TYPE_BEGIN_RANGE_KHR = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + VK_GEOMETRY_TYPE_END_RANGE_KHR = VK_GEOMETRY_TYPE_AABBS_KHR, + VK_GEOMETRY_TYPE_RANGE_SIZE_KHR = (VK_GEOMETRY_TYPE_AABBS_KHR - VK_GEOMETRY_TYPE_TRIANGLES_KHR + 1), + VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryTypeKHR; +typedef VkGeometryTypeKHR VkGeometryTypeNV; + + +typedef enum VkAccelerationStructureTypeKHR { + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1, + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_BEGIN_RANGE_KHR = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_END_RANGE_KHR = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_RANGE_SIZE_KHR = (VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR + 1), + VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureTypeKHR; +typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV; + + +typedef enum VkCopyAccelerationStructureModeKHR { + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1, + VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2, + VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3, + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_BEGIN_RANGE_KHR = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_END_RANGE_KHR = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR, + 
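/*
 * Illustrative sketch, assuming the VkRayTracingShaderGroupCreateInfoNV
 * structure retyped later in this hunk: a "general" group (raygen, miss or
 * callable) references exactly one stage and fills the remaining slots with
 * VK_SHADER_UNUSED_KHR, to which VK_SHADER_UNUSED_NV is now aliased.
 */
#include <vulkan/vulkan.h>

static VkRayTracingShaderGroupCreateInfoNV make_raygen_group(uint32_t raygenStageIndex)
{
    VkRayTracingShaderGroupCreateInfoNV group = {
        .sType              = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
        .type               = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, /* alias of ..._GENERAL_NV */
        .generalShader      = raygenStageIndex, /* index into VkRayTracingPipelineCreateInfoNV::pStages */
        .closestHitShader   = VK_SHADER_UNUSED_KHR,
        .anyHitShader       = VK_SHADER_UNUSED_KHR,
        .intersectionShader = VK_SHADER_UNUSED_KHR,
    };
    return group;
}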
VK_COPY_ACCELERATION_STRUCTURE_MODE_RANGE_SIZE_KHR = (VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR + 1), + VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCopyAccelerationStructureModeKHR; +typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV; + + +typedef enum VkAccelerationStructureMemoryRequirementsTypeKHR { + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR = 0, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR = 1, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR = 2, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BEGIN_RANGE_KHR = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_END_RANGE_KHR = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_RANGE_SIZE_KHR = (VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR + 1), + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureMemoryRequirementsTypeKHR; +typedef VkAccelerationStructureMemoryRequirementsTypeKHR VkAccelerationStructureMemoryRequirementsTypeNV; + + +typedef enum VkGeometryFlagBitsKHR { + VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002, + VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR, + VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryFlagBitsKHR; +typedef VkFlags VkGeometryFlagsKHR; +typedef VkGeometryFlagsKHR VkGeometryFlagsNV; + +typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV; + + +typedef enum VkGeometryInstanceFlagBitsKHR { + VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = 0x00000002, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008, + VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryInstanceFlagBitsKHR; +typedef VkFlags VkGeometryInstanceFlagsKHR; +typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV; + +typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV; + + +typedef enum VkBuildAccelerationStructureFlagBitsKHR { + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002, + 
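/*
 * Illustrative sketch: because the NV-suffixed names above are plain aliases
 * of the new KHR values, code written against VK_NV_ray_tracing keeps
 * compiling unchanged, and the two spellings can be mixed freely.
 */
#include <vulkan/vulkan.h>

static void nv_khr_alias_equivalence(void)
{
    /* The NV type name is a typedef of the KHR enum type... */
    VkAccelerationStructureTypeNV type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
    /* ...and each NV enumerant has the same value as its KHR counterpart. */
    VkGeometryFlagsKHR flags = VK_GEOMETRY_OPAQUE_BIT_NV |
                               VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR;
    (void)type;
    (void)flags;
}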
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureFlagBitsKHR; +typedef VkFlags VkBuildAccelerationStructureFlagsKHR; +typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV; + +typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV; + typedef struct VkRayTracingShaderGroupCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkRayTracingShaderGroupTypeNV type; - uint32_t generalShader; - uint32_t closestHitShader; - uint32_t anyHitShader; - uint32_t intersectionShader; + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; } VkRayTracingShaderGroupCreateInfoNV; typedef struct VkRayTracingPipelineCreateInfoNV { @@ -9422,11 +9316,11 @@ typedef struct VkGeometryDataNV { } VkGeometryDataNV; typedef struct VkGeometryNV { - VkStructureType sType; - const void* pNext; - VkGeometryTypeNV geometryType; - VkGeometryDataNV geometry; - VkGeometryFlagsNV flags; + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkGeometryDataNV geometry; + VkGeometryFlagsKHR flags; } VkGeometryNV; typedef struct VkAccelerationStructureInfoNV { @@ -9446,22 +9340,26 @@ typedef struct VkAccelerationStructureCreateInfoNV { VkAccelerationStructureInfoNV info; } VkAccelerationStructureCreateInfoNV; -typedef struct VkBindAccelerationStructureMemoryInfoNV { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureNV accelerationStructure; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; -} VkBindAccelerationStructureMemoryInfoNV; - -typedef struct VkWriteDescriptorSetAccelerationStructureNV { - VkStructureType sType; - const void* pNext; - uint32_t accelerationStructureCount; - const VkAccelerationStructureNV* pAccelerationStructures; -} VkWriteDescriptorSetAccelerationStructureNV; +typedef struct VkBindAccelerationStructureMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindAccelerationStructureMemoryInfoKHR; + +typedef VkBindAccelerationStructureMemoryInfoKHR VkBindAccelerationStructureMemoryInfoNV; + +typedef struct VkWriteDescriptorSetAccelerationStructureKHR { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureKHR* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureKHR; + +typedef 
VkWriteDescriptorSetAccelerationStructureKHR VkWriteDescriptorSetAccelerationStructureNV; typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { VkStructureType sType; @@ -9483,17 +9381,49 @@ typedef struct VkPhysicalDeviceRayTracingPropertiesNV { uint32_t maxDescriptorSetAccelerationStructures; } VkPhysicalDeviceRayTracingPropertiesNV; +typedef struct VkTransformMatrixKHR { + float matrix[3][4]; +} VkTransformMatrixKHR; + +typedef VkTransformMatrixKHR VkTransformMatrixNV; + +typedef struct VkAabbPositionsKHR { + float minX; + float minY; + float minZ; + float maxX; + float maxY; + float maxZ; +} VkAabbPositionsKHR; + +typedef VkAabbPositionsKHR VkAabbPositionsNV; + +typedef struct VkAccelerationStructureInstanceKHR { + VkTransformMatrixKHR transform; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureInstanceKHR; + +typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV; + typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); -typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); -typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); -typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); -typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryKHR)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, 
VkCopyAccelerationStructureModeKHR mode); typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); -typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); #ifndef VK_NO_PROTOTYPES @@ -9503,9 +9433,14 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR( + VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks* pAllocator); + VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, + VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( @@ -9513,10 +9448,15 @@ VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); +VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryKHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); 
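/*
 * Illustrative sketch of the VK_NV_ray_tracing flow the prototypes above
 * describe: create the acceleration structure object, query its backing-memory
 * requirements, then bind previously allocated device memory. Memory-type
 * selection and allocation are elided; `memory` is assumed to come from a
 * suitable heap and `asInfo` to describe the geometry. In a real application
 * these extension entry points would be fetched with vkGetDeviceProcAddr.
 */
#include <vulkan/vulkan.h>

static VkResult create_and_bind_acceleration_structure(
    VkDevice                             device,
    const VkAccelerationStructureInfoNV *asInfo,
    VkDeviceMemory                       memory,
    VkDeviceSize                         memoryOffset,
    VkAccelerationStructureNV           *outAccel)
{
    VkAccelerationStructureCreateInfoNV createInfo = {
        .sType         = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV,
        .compactedSize = 0,
        .info          = *asInfo,
    };
    VkResult result = vkCreateAccelerationStructureNV(device, &createInfo, NULL, outAccel);
    if (result != VK_SUCCESS)
        return result;

    /* OBJECT requirements size the structure itself; BUILD/UPDATE_SCRATCH
     * (see the enum above) size the scratch buffer instead. */
    VkAccelerationStructureMemoryRequirementsInfoNV reqInfo = {
        .sType                 = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
        .type                  = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
        .accelerationStructure = *outAccel,
    };
    VkMemoryRequirements2KHR memReq = { .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
    vkGetAccelerationStructureMemoryRequirementsNV(device, &reqInfo, &memReq);
    (void)memReq; /* size/alignment would drive the (elided) vkAllocateMemory call */

    /* The NV entry point now takes the shared ...MemoryInfoKHR structure; the
     * NV sType added earlier in this patch is an alias of the KHR value. */
    VkBindAccelerationStructureMemoryInfoNV bindInfo = {
        .sType                 = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
        .accelerationStructure = *outAccel,
        .memory                = memory,
        .memoryOffset          = memoryOffset,
    };
    return vkBindAccelerationStructureMemoryNV(device, 1, &bindInfo);
}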
+ VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); + const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, @@ -9524,16 +9464,16 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, + VkAccelerationStructureKHR dst, + VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset); VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkCopyAccelerationStructureModeNV mode); + VkAccelerationStructureKHR dst, + VkAccelerationStructureKHR src, + VkCopyAccelerationStructureModeKHR mode); VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, @@ -9560,6 +9500,14 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, @@ -9570,14 +9518,22 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, + VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData); +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, - const VkAccelerationStructureNV* pAccelerationStructures, + const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); @@ -10020,7 +9976,7 @@ typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { #define VK_INTEL_performance_query 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) -#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 1 +#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2 #define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query" typedef enum VkPerformanceConfigurationTypeINTEL { @@ -10087,11 +10043,13 @@ typedef struct VkInitializePerformanceApiInfoINTEL { void* pUserData; } VkInitializePerformanceApiInfoINTEL; -typedef struct VkQueryPoolCreateInfoINTEL { +typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL { VkStructureType sType; const void* pNext; VkQueryPoolSamplingModeINTEL performanceCountersSampling; -} VkQueryPoolCreateInfoINTEL; +} VkQueryPoolPerformanceQueryCreateInfoINTEL; + +typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL; typedef struct VkPerformanceMarkerInfoINTEL { VkStructureType sType; @@ -10420,16 +10378,17 @@ typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; #define 
VK_EXT_validation_features 1 -#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 2 +#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 3 #define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" typedef enum VkValidationFeatureEnableEXT { VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0, VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, + VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3, VK_VALIDATION_FEATURE_ENABLE_BEGIN_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, - VK_VALIDATION_FEATURE_ENABLE_END_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, - VK_VALIDATION_FEATURE_ENABLE_RANGE_SIZE_EXT = (VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT - VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT + 1), + VK_VALIDATION_FEATURE_ENABLE_END_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT, + VK_VALIDATION_FEATURE_ENABLE_RANGE_SIZE_EXT = (VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT - VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT + 1), VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF } VkValidationFeatureEnableEXT; @@ -10703,6 +10662,199 @@ typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT { +#define VK_NV_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) +#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands" + +typedef enum VkIndirectCommandsTokenTypeNV { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NV = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV - VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV + 1), + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNV; + +typedef enum VkIndirectStateFlagBitsNV { + VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001, + VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectStateFlagBitsNV; +typedef VkFlags VkIndirectStateFlagsNV; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsNV; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV { + VkStructureType sType; + const void* pNext; + uint32_t maxGraphicsShaderGroupCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsStreamCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsStreamStride; + uint32_t minSequencesCountBufferOffsetAlignment; + uint32_t 
minSequencesIndexBufferOffsetAlignment; + uint32_t minIndirectCommandsBufferOffsetAlignment; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +typedef struct VkGraphicsShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; +} VkGraphicsShaderGroupCreateInfoNV; + +typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t groupCount; + const VkGraphicsShaderGroupCreateInfoNV* pGroups; + uint32_t pipelineCount; + const VkPipeline* pPipelines; +} VkGraphicsPipelineShaderGroupsCreateInfoNV; + +typedef struct VkBindShaderGroupIndirectCommandNV { + uint32_t groupIndex; +} VkBindShaderGroupIndirectCommandNV; + +typedef struct VkBindIndexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandNV; + +typedef struct VkBindVertexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandNV; + +typedef struct VkSetStateFlagsIndirectCommandNV { + uint32_t data; +} VkSetStateFlagsIndirectCommandNV; + +typedef struct VkIndirectCommandsStreamNV { + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsStreamNV; + +typedef struct VkIndirectCommandsLayoutTokenNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeNV tokenType; + uint32_t stream; + uint32_t offset; + uint32_t vertexBindingUnit; + VkBool32 vertexDynamicStride; + VkPipelineLayout pushconstantPipelineLayout; + VkShaderStageFlags pushconstantShaderStageFlags; + uint32_t pushconstantOffset; + uint32_t pushconstantSize; + VkIndirectStateFlagsNV indirectStateFlags; + uint32_t indexTypeCount; + const VkIndexType* pIndexTypes; + const uint32_t* pIndexTypeValues; +} VkIndirectCommandsLayoutTokenNV; + +typedef struct VkIndirectCommandsLayoutCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsNV flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNV* pTokens; + uint32_t streamCount; + const uint32_t* pStreamStrides; +} VkIndirectCommandsLayoutCreateInfoNV; + +typedef struct VkGeneratedCommandsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t streamCount; + const VkIndirectCommandsStreamNV* pStreams; + uint32_t sequencesCount; + VkBuffer preprocessBuffer; + VkDeviceSize preprocessOffset; + VkDeviceSize preprocessSize; + VkBuffer sequencesCountBuffer; + VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkGeneratedCommandsInfoNV; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkGeneratedCommandsMemoryRequirementsInfoNV; + +typedef void (VKAPI_PTR 
*PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( + VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); +#endif + + #define VK_EXT_texel_buffer_alignment 1 #define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 #define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" @@ -10745,6 +10897,42 @@ typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { #define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1 #define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type" + +#define VK_EXT_pipeline_creation_cache_control 1 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 2 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control" +typedef struct VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelineCreationCacheControl; +} VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT; + + + +#define VK_NV_device_diagnostics_config 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config" + +typedef enum VkDeviceDiagnosticsConfigFlagBitsNV { + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002, + 
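/*
 * Illustrative sketch of how the VK_NV_device_generated_commands entry points
 * declared above fit together: size the preprocess buffer with
 * vkGetGeneratedCommandsMemoryRequirementsNV, then record execution with
 * vkCmdExecuteGeneratedCommandsNV. Creation of the pipeline, the indirect
 * commands layout and the buffers is assumed to have happened elsewhere, and
 * in a real application the extension entry points would be fetched with
 * vkGetDeviceProcAddr.
 */
#include <vulkan/vulkan.h>

static void record_generated_commands(
    VkDevice                   device,
    VkCommandBuffer            cmd,
    VkPipeline                 pipeline,
    VkIndirectCommandsLayoutNV layout,
    VkBuffer                   tokenStreamBuffer, /* holds the per-token command data */
    VkBuffer                   preprocessBuffer,  /* sized from the query below */
    VkDeviceSize               preprocessSize,
    uint32_t                   sequencesCount)
{
    /* Query how large the preprocess (scratch) buffer must be for this
     * pipeline/layout/sequence-count combination. */
    VkGeneratedCommandsMemoryRequirementsInfoNV reqInfo = {
        .sType                  = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
        .pipelineBindPoint      = VK_PIPELINE_BIND_POINT_GRAPHICS,
        .pipeline               = pipeline,
        .indirectCommandsLayout = layout,
        .maxSequencesCount      = sequencesCount,
    };
    VkMemoryRequirements2 memReq = { .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
    vkGetGeneratedCommandsMemoryRequirementsNV(device, &reqInfo, &memReq);
    (void)memReq; /* preprocessBuffer/preprocessSize are assumed to satisfy this */

    VkIndirectCommandsStreamNV stream = { .buffer = tokenStreamBuffer, .offset = 0 };

    VkGeneratedCommandsInfoNV info = {
        .sType                  = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
        .pipelineBindPoint      = VK_PIPELINE_BIND_POINT_GRAPHICS,
        .pipeline               = pipeline,
        .indirectCommandsLayout = layout,
        .streamCount            = 1,
        .pStreams               = &stream,
        .sequencesCount         = sequencesCount,
        .preprocessBuffer       = preprocessBuffer,
        .preprocessOffset       = 0,
        .preprocessSize         = preprocessSize,
    };
    /* isPreprocessed == VK_FALSE: no separate vkCmdPreprocessGeneratedCommandsNV pass was recorded. */
    vkCmdExecuteGeneratedCommandsNV(cmd, VK_FALSE, &info);
}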
VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004, + VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkDeviceDiagnosticsConfigFlagBitsNV; +typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; +typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 diagnosticsConfig; +} VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + +typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceDiagnosticsConfigFlagsNV flags; +} VkDeviceDiagnosticsConfigCreateInfoNV; + + #ifdef __cplusplus } #endif diff --git a/registry/generator.py b/registry/generator.py index 47dfb70..95fda21 100644 --- a/registry/generator.py +++ b/registry/generator.py @@ -78,6 +78,7 @@ def regSortCategoryKey(feature): def regSortOrderKey(feature): """Sort key for regSortFeatures - key is the sortorder attribute.""" + # print("regSortOrderKey {} -> {}".format(feature.name, feature.sortorder)) return feature.sortorder @@ -98,14 +99,14 @@ def regSortExtensionNumberKey(feature): def regSortFeatures(featureList): """Default sort procedure for features. - - Sorts by primary key of feature category ('feature' or 'extension'), - - then by sort order within the category + - Sorts by explicit sort order (default 0) relative to other features + - then by feature category ('feature' or 'extension'), - then by version number (for features) - then by extension number (for extensions)""" featureList.sort(key=regSortExtensionNumberKey) featureList.sort(key=regSortFeatureVersionKey) - featureList.sort(key=regSortOrderKey) featureList.sort(key=regSortCategoryKey) + featureList.sort(key=regSortOrderKey) class GeneratorOptions: diff --git a/registry/genvk.py b/registry/genvk.py index 37d050e..668be79 100755 --- a/registry/genvk.py +++ b/registry/genvk.py @@ -245,10 +245,16 @@ def makeGenOpts(args): # Track all platform extensions, for exclusion from vulkan_core.h allPlatformExtensions = [] - # Extensions suppressed for all platforms. - # Covers common WSI extension types. + # Extensions suppressed for all WSI platforms (WSI extensions required + # by all platforms) commonSuppressExtensions = [ 'VK_KHR_display', 'VK_KHR_swapchain' ] + # Extensions required and suppressed for beta "platform". This can + # probably eventually be derived from the requires= attributes of + # the extension blocks. 
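/*
 * Illustrative sketch: the new VK_NV_device_diagnostics_config structure is
 * consumed at device creation time by chaining it into VkDeviceCreateInfo
 * (the validusage.json change below adds it to the legal pNext list). Queue
 * setup and extension-support checks are elided; `queueInfo` is assumed to be
 * filled by the caller.
 */
#include <vulkan/vulkan.h>

static VkResult create_device_with_diagnostics(VkPhysicalDevice physicalDevice,
                                               const VkDeviceQueueCreateInfo *queueInfo,
                                               VkDevice *outDevice)
{
    VkDeviceDiagnosticsConfigCreateInfoNV diagnostics = {
        .sType = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
        .flags = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV |
                 VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV,
    };
    const char *extensions[] = { VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME };
    VkDeviceCreateInfo createInfo = {
        .sType                   = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
        .pNext                   = &diagnostics,
        .queueCreateInfoCount    = 1,
        .pQueueCreateInfos       = queueInfo,
        .enabledExtensionCount   = 1,
        .ppEnabledExtensionNames = extensions,
    };
    return vkCreateDevice(physicalDevice, &createInfo, NULL, outDevice);
}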
+ betaRequireExtensions = [ 'VK_KHR_ray_tracing', 'VK_KHR_deferred_host_operations', 'VK_KHR_pipeline_library' ] + betaSuppressExtensions = [ 'VK_NV_ray_tracing' ] + platforms = [ [ 'vulkan_android.h', [ 'VK_KHR_android_surface', 'VK_ANDROID_external_memory_android_hardware_buffer' @@ -273,6 +279,7 @@ def makeGenOpts(args): [ 'vulkan_xlib.h', [ 'VK_KHR_xlib_surface' ], commonSuppressExtensions ], [ 'vulkan_xlib_xrandr.h', [ 'VK_EXT_acquire_xlib_display' ], commonSuppressExtensions ], [ 'vulkan_metal.h', [ 'VK_EXT_metal_surface' ], commonSuppressExtensions ], + [ 'vulkan_beta.h', betaRequireExtensions, betaSuppressExtensions ], ] for platform in platforms: diff --git a/registry/reg.py b/registry/reg.py index da769ab..24012de 100755 --- a/registry/reg.py +++ b/registry/reg.py @@ -1120,6 +1120,7 @@ class Registry: # Sort the features list, if a sort procedure is defined if self.genOpts.sortProcedure: self.genOpts.sortProcedure(features) + # print('sortProcedure ->', [f.name for f in features]) # Pass 1: loop over requested API versions and extensions tagging # types/commands/features as required (in an block) or no diff --git a/registry/validusage.json b/registry/validusage.json index b0750a3..fd4cbc7 100644 --- a/registry/validusage.json +++ b/registry/validusage.json @@ -1,9 +1,9 @@ { "version info": { "schema version": 2, - "api version": "1.2.134", - "comment": "from git branch: github-master commit: b27536168823d5e8b025fc5622eab7df54008589", - "date": "2020-03-06 14:02:32Z" + "api version": "1.2.135", + "comment": "from git branch: github-master commit: 18caa8e0b823981992e6df876ec56d4cfa8bc89f", + "date": "2020-03-17 11:15:18Z" }, "validation": { "vkGetInstanceProcAddr": { @@ -214,7 +214,7 @@ }, { "vuid": "VUID-VkPhysicalDeviceProperties2-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceDepthStencilResolveProperties, VkPhysicalDeviceDescriptorIndexingProperties, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDriverProperties, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsProperties, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceInlineUniformBlockPropertiesEXT, VkPhysicalDeviceLineRasterizationPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePerformanceQueryPropertiesKHR, VkPhysicalDevicePointClippingProperties, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxProperties, VkPhysicalDeviceShaderCoreProperties2AMD, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceSubgroupSizeControlPropertiesEXT, VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT, VkPhysicalDeviceTimelineSemaphoreProperties, VkPhysicalDeviceTransformFeedbackPropertiesEXT, VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT, VkPhysicalDeviceVulkan11Properties, or 
VkPhysicalDeviceVulkan12Properties" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceDepthStencilResolveProperties, VkPhysicalDeviceDescriptorIndexingProperties, VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDriverProperties, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsProperties, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceInlineUniformBlockPropertiesEXT, VkPhysicalDeviceLineRasterizationPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePerformanceQueryPropertiesKHR, VkPhysicalDevicePointClippingProperties, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingPropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxProperties, VkPhysicalDeviceShaderCoreProperties2AMD, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceSubgroupSizeControlPropertiesEXT, VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT, VkPhysicalDeviceTimelineSemaphoreProperties, VkPhysicalDeviceTransformFeedbackPropertiesEXT, VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT, VkPhysicalDeviceVulkan11Properties, or VkPhysicalDeviceVulkan12Properties" }, { "vuid": "VUID-VkPhysicalDeviceProperties2-sType-unique", @@ -542,7 +542,7 @@ }, { "vuid": "VUID-VkDeviceCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeatures, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, 
VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVulkan11Features, VkPhysicalDeviceVulkan12Features, VkPhysicalDeviceVulkanMemoryModelFeatures, or VkPhysicalDeviceYcbcrImageArraysFeaturesEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceDiagnosticsConfigCreateInfoNV, VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeatures, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, VkPhysicalDeviceDiagnosticsConfigFeaturesNV, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceRayTracingFeaturesKHR, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, 
VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVulkan11Features, VkPhysicalDeviceVulkan12Features, VkPhysicalDeviceVulkanMemoryModelFeatures, or VkPhysicalDeviceYcbcrImageArraysFeaturesEXT" }, { "vuid": "VUID-VkDeviceCreateInfo-sType-unique", @@ -610,6 +610,18 @@ } ] }, + "VkDeviceDiagnosticsConfigCreateInfoNV": { + "(VK_NV_device_diagnostics_config)": [ + { + "vuid": "VUID-VkDeviceDiagnosticsConfigCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkDeviceDiagnosticsConfigCreateInfoNV-flags-parameter", + "text": " flags must be a valid combination of VkDeviceDiagnosticsConfigFlagBitsNV values" + } + ] + }, "vkDestroyDevice": { "core": [ { @@ -3421,10 +3433,16 @@ "text": " If image has a depth/stencil format with both depth and stencil components, then the aspectMask member of subresourceRange must include both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT" } ], + "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageMemoryBarrier-image-02902", + "text": " If image has a color format, then the aspectMask member of subresourceRange must only include VK_IMAGE_ASPECT_COLOR_BIT" + } + ], "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ { "vuid": "VUID-VkImageMemoryBarrier-image-01671", - "text": " If image has a single-plane color format or is not disjoint, then the aspectMask member of subresourceRange must be VK_IMAGE_ASPECT_COLOR_BIT" + "text": " If image has a color format and either the format is single-plane or the image is not disjoint then the aspectMask member of subresourceRange must only include VK_IMAGE_ASPECT_COLOR_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier-image-01672", @@ -4371,6 +4389,22 @@ "text": " All attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have the same sample count" }, { + "vuid": "VUID-VkSubpassDescription2-pInputAttachments-02897", + "text": " All attachments in pInputAttachments that are not VK_ATTACHMENT_UNUSED must have formats whose features contain at least one of VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT or VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT." 
+ }, + { + "vuid": "VUID-VkSubpassDescription2-pColorAttachments-02898", + "text": " All attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have formats whose features contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkSubpassDescription2-pResolveAttachments-02899", + "text": " All attachments in pResolveAttachments that are not VK_ATTACHMENT_UNUSED must have formats whose features contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkSubpassDescription2-pDepthStencilAttachment-02900", + "text": " If pDepthStencilAttachment is not NULL and the attachment is not VK_ATTACHMENT_UNUSED then it must have a format whose features contain VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT" + }, + { "vuid": "VUID-VkSubpassDescription2-pDepthStencilAttachment-03071", "text": " If neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if pDepthStencilAttachment is not VK_ATTACHMENT_UNUSED and any attachments in pColorAttachments are not VK_ATTACHMENT_UNUSED, they must have the same sample count" }, @@ -6022,6 +6056,12 @@ "vuid": "VUID-vkCreateComputePipelines-pipelineCache-parent", "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" } + ], + "(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-vkCreateComputePipelines-pipelineCache-02873", + "text": " If pipelineCache was created with VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT, host access to pipelineCache must be externally synchronized." + } ] }, "VkComputePipelineCreateInfo": { @@ -6086,6 +6126,50 @@ "vuid": "VUID-VkComputePipelineCreateInfo-commonparent", "text": " Both of basePipelineHandle, and layout that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" } + ], + "(VK_KHR_pipeline_library)": [ + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-03364", + "text": " flags must not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" + } + ], + "(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-03365", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-03366", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-03367", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-03368", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-03369", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-03370", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR" + } + ], + "(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-02874", + "text": " flags must not include VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" + } + ], + "(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-VkComputePipelineCreateInfo-pipelineCreationCacheControl-02875", + "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include 
VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT" + } ] }, "VkPipelineShaderStageCreateInfo": { @@ -6304,6 +6388,12 @@ "vuid": "VUID-vkCreateGraphicsPipelines-pipelineCache-parent", "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" } + ], + "(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-vkCreateGraphicsPipelines-pipelineCache-02876", + "text": " If pipelineCache was created with VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT, host access to pipelineCache must be externally synchronized." + } ] }, "VkGraphicsPipelineCreateInfo": { @@ -6462,7 +6552,7 @@ }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfoEXT, VkPipelineDiscardRectangleStateCreateInfoEXT, or VkPipelineRepresentativeFragmentTestStateCreateInfoNV" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkGraphicsPipelineShaderGroupsCreateInfoNV, VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfoEXT, VkPipelineDiscardRectangleStateCreateInfoEXT, or VkPipelineRepresentativeFragmentTestStateCreateInfoNV" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-sType-unique", @@ -6660,6 +6750,50 @@ "vuid": "VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767", "text": " If the stippledLineEnable member of VkPipelineRasterizationLineStateCreateInfoEXT is VK_TRUE and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, then the lineStippleFactor member of VkPipelineRasterizationLineStateCreateInfoEXT must be in the range [1,256]" } + ], + "(VK_KHR_pipeline_library)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03371", + "text": " flags must not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" + } + ], + "(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03372", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03373", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03374", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03375", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03376", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03377", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR" + } + ], + "(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-02877", + "text": " If flags includes VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, then the VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNVdeviceGeneratedCommands feature must be enabled" + } + ], + "(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": 
"VUID-VkGraphicsPipelineCreateInfo-pipelineCreationCacheControl-02878", + "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT" + } ] }, "VkPipelineDynamicStateCreateInfo": { @@ -6686,6 +6820,102 @@ } ] }, + "VkGraphicsPipelineShaderGroupsCreateInfoNV": { + "(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-groupCount-02879", + "text": " groupCount must be at least 1 and as maximum VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxGraphicsShaderGroupCount." + }, + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-groupCount-02880", + "text": " The sum of groupCount including those groups added from referenced pPipelines must also be as maximum VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxGraphicsShaderGroupCount." + }, + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-pGroups-02881", + "text": " The state of the first element of pGroups must match its equivalent within the parent’s VkGraphicsPipelineCreateInfo." + }, + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-pGroups-02882", + "text": " Each element of pGroups must in combination with the rest of the pipeline state yield a valid state configuration" + }, + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-pPipelines-02886", + "text": " Each element of the pPipelines member of libraries must have been created with identical state to the pipeline currently created except the state that can be overriden by VkGraphicsShaderGroupCreateInfoNV." + }, + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-deviceGeneratedCommands-02887", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNVdeviceGeneratedCommands feature must be enabled" + }, + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-pGroups-parameter", + "text": " pGroups must be a valid pointer to an array of groupCount valid VkGraphicsShaderGroupCreateInfoNV structures" + }, + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-pPipelines-parameter", + "text": " If pipelineCount is not 0, pPipelines must be a valid pointer to an array of pipelineCount valid VkPipeline handles" + }, + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-groupCount-arraylength", + "text": " groupCount must be greater than 0" + } + ], + "(VK_NV_device_generated_commands)+!(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-pGroups-02883", + "text": " All elements of pGroups must use the same shader stage combinations." + } + ], + "(VK_NV_device_generated_commands)+(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-pGroups-02884", + "text": " All elements of pGroups must use the same shader stage combinations unless any mesh shader stage is used, then either combination of task and mesh or just mesh shader is valid." + }, + { + "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-pGroups-02885", + "text": " Mesh and regular primitive shading stages cannot be mixed across pGroups." 
+ } + ] + }, + "VkGraphicsShaderGroupCreateInfoNV": { + "(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkGraphicsShaderGroupCreateInfoNV-stageCount-02888", + "text": " For stageCount, the same restrictions as in VkGraphicsPipelineCreateInfo::stageCount apply." + }, + { + "vuid": "VUID-VkGraphicsShaderGroupCreateInfoNV-pStages-02889", + "text": " For pStages, the same restrictions as in VkGraphicsPipelineCreateInfo::pStages apply." + }, + { + "vuid": "VUID-VkGraphicsShaderGroupCreateInfoNV-pVertexInputState-02890", + "text": " For pVertexInputState, the same restrictions as in VkGraphicsPipelineCreateInfo::pVertexInputState apply." + }, + { + "vuid": "VUID-VkGraphicsShaderGroupCreateInfoNV-pTessellationState-02891", + "text": " For pTessellationState, the same restrictions as in VkGraphicsPipelineCreateInfo::pTessellationState apply." + }, + { + "vuid": "VUID-VkGraphicsShaderGroupCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkGraphicsShaderGroupCreateInfoNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkGraphicsShaderGroupCreateInfoNV-pStages-parameter", + "text": " pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures" + }, + { + "vuid": "VUID-VkGraphicsShaderGroupCreateInfoNV-stageCount-arraylength", + "text": " stageCount must be greater than 0" + } + ] + }, "vkDestroyPipeline": { "core": [ { @@ -6757,13 +6987,19 @@ "text": " pNext must be NULL" }, { - "vuid": "VUID-VkPipelineCacheCreateInfo-flags-zerobitmask", - "text": " flags must be 0" + "vuid": "VUID-VkPipelineCacheCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkPipelineCacheCreateFlagBits values" }, { "vuid": "VUID-VkPipelineCacheCreateInfo-pInitialData-parameter", "text": " If initialDataSize is not 0, pInitialData must be a valid pointer to an array of initialDataSize bytes" } + ], + "(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-VkPipelineCacheCreateInfo-pipelineCreationCacheControl-02892", + "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT" + } ] }, "vkMergePipelineCaches": { @@ -6878,6 +7114,26 @@ } ] }, + "VkPipelineLibraryCreateInfoKHR": { + "(VK_KHR_pipeline_library)": [ + { + "vuid": "VUID-VkPipelineLibraryCreateInfoKHR-pLibraries-03381", + "text": " Each element of pLibraries must have been created with VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" + }, + { + "vuid": "VUID-VkPipelineLibraryCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkPipelineLibraryCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPipelineLibraryCreateInfoKHR-pLibraries-parameter", + "text": " If libraryCount is not 0, pLibraries must be a valid pointer to an array of libraryCount valid VkPipeline handles" + } + ] + }, "vkCmdBindPipeline": { "core": [ { @@ -6937,14 +7193,64 @@ "text": " This command must not be recorded when transform feedback is active" } ], - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ { "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-02391", - "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, the VkCommandPool that commandBuffer was allocated from must support compute operations" + "text": " If pipelineBindPoint is 
VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, the VkCommandPool that commandBuffer was allocated from must support compute operations" }, { "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-02392", - "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, the pipeline must be a ray tracing pipeline" + "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, the pipeline must be a ray tracing pipeline" + } + ], + "(VK_KHR_pipeline_library)": [ + { + "vuid": "VUID-vkCmdBindPipeline-pipeline-03382", + "text": " The pipeline must not have been created with VK_PIPELINE_CREATE_LIBRARY_BIT_KHR set." + } + ] + }, + "vkCmdBindPipelineShaderGroupNV": { + "(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-groupIndex-02893", + "text": " groupIndex must be 0 or less than the effective VkGraphicsPipelineShaderGroupsCreateInfoNV::groupCount including the referenced pipelines." + }, + { + "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-pipelineBindPoint-02894", + "text": " The pipelineBindPoint must be VK_PIPELINE_BIND_POINT_GRAPHICS" + }, + { + "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-groupIndex-02895", + "text": " The same restrictions as vkCmdBindPipeline apply as if the bound pipeline was created only with the Shader Group from the groupIndex information." + }, + { + "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-deviceGeneratedCommands-02896", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNVdeviceGeneratedCommands feature must be enabled" + }, + { + "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-pipelineBindPoint-parameter", + "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" + }, + { + "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-commonparent", + "text": " Both of commandBuffer, and pipeline must have been created, allocated, or retrieved from the same VkDevice" } ] }, @@ -7157,13 +7463,13 @@ ] }, "vkCreateRayTracingPipelinesNV": { - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-02402", + "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-03415", "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element" }, { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-02403", + "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-03416", "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set" }, { @@ -7194,44 +7500,100 @@ "vuid": "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parent", "text": " If pipelineCache is a valid handle, it must 
have been created, allocated, or retrieved from device" } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)+(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-02903", + "text": " If pipelineCache was created with VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT, host access to pipelineCache must be externally synchronized." + } + ] + }, + "vkCreateRayTracingPipelinesKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-flags-03415", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-flags-03416", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-rayTracing-03455", + "text": " The rayTracing feature must be enabled" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-parameter", + "text": " If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pCreateInfos-parameter", + "text": " pCreateInfos must be a valid pointer to an array of createInfoCount valid VkRayTracingPipelineCreateInfoKHR structures" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pPipelines-parameter", + "text": " pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-createInfoCount-arraylength", + "text": " createInfoCount must be greater than 0" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-parent", + "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-02903", + "text": " If pipelineCache was created with VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT, host access to pipelineCache must be externally synchronized." 
+ } ] }, "VkRayTracingPipelineCreateInfoNV": { - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02404", + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03421", "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a ray tracing VkPipeline" }, { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02405", + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03422", "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command’s pCreateInfos parameter" }, { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02406", + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03423", "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE" }, { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02407", + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03424", "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1" }, { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-stage-02408", - "text": " The stage member of one element of pStages must be VK_SHADER_STAGE_RAYGEN_BIT_NV" + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-stage-03425", + "text": " The stage member of at least one element of pStages must be VK_SHADER_STAGE_RAYGEN_BIT_KHR" }, { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pStages-02409", + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pStages-03426", "text": " The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" }, { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-02410", + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-03427", "text": " layout must be consistent with all shaders specified in pStages" }, { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-02411", + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-03428", "text": " The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" }, { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-maxRecursionDepth-02412", + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-maxRecursionDepth-03457", "text": " maxRecursionDepth must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxRecursionDepth" }, { @@ -7274,84 +7636,372 @@ "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-commonparent", "text": " Both of basePipelineHandle, and layout that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" } - ] - }, - "VkRayTracingShaderGroupCreateInfoNV": { - "(VK_NV_ray_tracing)": [ + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)+(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02413", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV then generalShader must be a valid index into pStages referring to a shader of VK_SHADER_STAGE_RAYGEN_BIT_NV, VK_SHADER_STAGE_MISS_BIT_NV, or 
VK_SHADER_STAGE_CALLABLE_BIT_NV" - }, + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02904", + "text": " flags must not include VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)+(VK_EXT_pipeline_creation_cache_control)": [ { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02414", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV then closestHitShader, anyHitShader, and intersectionShader must be VK_SHADER_UNUSED_NV" + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pipelineCreationCacheControl-02905", + "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT" }, { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02415", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV then intersectionShader must be a valid index into pStages referring to a shader of VK_SHADER_STAGE_INTERSECTION_BIT_NV" - }, + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02957", + "text": " flags must not include both VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV and VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT at the same time." + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)+(VK_KHR_pipeline_library)": [ { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02416", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV then intersectionShader must be VK_SHADER_UNUSED_NV" + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03456", + "text": " flags must not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03458", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR" }, { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-closestHitShader-02417", - "text": " closestHitShader must be either VK_SHADER_UNUSED_NV or a valid index into pStages referring to a shader of VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV" + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03459", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR" }, { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-anyHitShader-02418", - "text": " anyHitShader must be either VK_SHADER_UNUSED_NV or a valid index into pStages referring to a shader of VK_SHADER_STAGE_ANY_HIT_BIT_NV" + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03460", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR" }, { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV" + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03461", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR" }, { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-pNext-pNext", - "text": " pNext must be NULL" + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03462", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR" }, { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter", - "text": " type must be a valid VkRayTracingShaderGroupTypeNV value" + "vuid": 
"VUID-VkRayTracingPipelineCreateInfoNV-flags-03463", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR" } ] }, - "vkGetRayTracingShaderGroupHandlesNV": { - "(VK_NV_ray_tracing)": [ + "VkRayTracingPipelineCreateInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-firstGroup-02419", - "text": " The sum of firstGroup and groupCount must be less than the number of shader groups in pipeline." + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03421", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a ray tracing VkPipeline" }, { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-dataSize-02420", - "text": " dataSize must be at least VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize {times} groupCount" + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03422", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command’s pCreateInfos parameter" }, { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-device-parameter", - "text": " device must be a valid VkDevice handle" + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03423", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE" }, { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parameter", - "text": " pipeline must be a valid VkPipeline handle" + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03424", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1" }, { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-pData-parameter", - "text": " pData must be a valid pointer to an array of dataSize bytes" + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-stage-03425", + "text": " The stage member of at least one element of pStages must be VK_SHADER_STAGE_RAYGEN_BIT_KHR" }, { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-dataSize-arraylength", - "text": " dataSize must be greater than 0" + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pStages-03426", + "text": " The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" }, { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parent", - "text": " pipeline must have been created, allocated, or retrieved from device" - } - ] - }, - "vkCompileDeferredNV": { - "(VK_NV_ray_tracing)": [ + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-layout-03427", + "text": " layout must be consistent with all shaders specified in pStages" + }, { - "vuid": "VUID-vkCompileDeferredNV-pipeline-02237", + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-layout-03428", + "text": " The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-maxRecursionDepth-03464", + "text": " maxRecursionDepth must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxRecursionDepth" + }, + { + "vuid": 
"VUID-VkRayTracingPipelineCreateInfoKHR-flags-03470", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, for any element of pGroups with a type of VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR or VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, the anyHitShader of that element must not be VK_SHADER_UNUSED_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03471", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, for any element of pGroups with a type of VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR or VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, the closestHitShader of that element must not be VK_SHADER_UNUSED_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-rayTracingPrimitiveCulling-03472", + "text": " If the rayTracingPrimitiveCulling feature is not enabled, flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-rayTracingPrimitiveCulling-03473", + "text": " If the rayTracingPrimitiveCulling feature is not enabled, flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-libraries-02958", + "text": " If libraries.pname:libraryCount is zero, then stageCount must not be zero" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-libraries-02959", + "text": " If libraries.pname:libraryCount is zero, then groupCount must not be zero" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeferredOperationInfoKHR or VkPipelineCreationFeedbackCreateInfoEXT" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkPipelineCreateFlagBits values" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pStages-parameter", + "text": " If stageCount is not 0, pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pGroups-parameter", + "text": " If groupCount is not 0, pGroups must be a valid pointer to an array of groupCount valid VkRayTracingShaderGroupCreateInfoKHR structures" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-libraries-parameter", + "text": " libraries must be a valid VkPipelineLibraryCreateInfoKHR structure" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInterface-parameter", + "text": " If pLibraryInterface is not NULL, pLibraryInterface must be a valid pointer to a valid VkRayTracingPipelineInterfaceCreateInfoKHR structure" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-layout-parameter", + "text": " layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-commonparent", + "text": " Both of basePipelineHandle, and layout that are valid handles of non-ignored parameters must have been created, allocated, or retrieved 
from the same VkDevice" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-02904", + "text": " flags must not include VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pipelineCreationCacheControl-02905", + "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_KHR_pipeline_library)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03465", + "text": " If flags includes VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, pLibraryInterface must not be NULL" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-libraryCount-03466", + "text": " If the libraryCount member of libraries is greater than 0, pLibraryInterface must not be NULL" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraries-03467", + "text": " Each element of the pLibraries member of libraries must have been created with the value of maxRecursionDepth equal to that in this pipeline" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraries-03468", + "text": " Each element of the pLibraries member of libraries must have been created with a layout that is compatible with the layout in this pipeline" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraries-03469", + "text": " Each element of the pLibraries member of libraries must have been created with values of the maxPayloadSize, maxAttributeSize, and maxCallableSize members of pLibraryInterface equal to those in this pipeline" + } + ] + }, + "VkRayTracingShaderGroupCreateInfoNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02413", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV then generalShader must be a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_RAYGEN_BIT_NV, VK_SHADER_STAGE_MISS_BIT_NV, or VK_SHADER_STAGE_CALLABLE_BIT_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02414", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV then closestHitShader, anyHitShader, and intersectionShader must be VK_SHADER_UNUSED_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02415", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV then intersectionShader must be a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_INTERSECTION_BIT_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02416", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV then intersectionShader must be VK_SHADER_UNUSED_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-closestHitShader-02417", + "text": " closestHitShader must be either VK_SHADER_UNUSED_NV or a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-anyHitShader-02418", + "text": " anyHitShader must be either VK_SHADER_UNUSED_NV or 
a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_ANY_HIT_BIT_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter", + "text": " type must be a valid VkRayTracingShaderGroupTypeKHR value" + } + ] + }, + "VkRayTracingShaderGroupCreateInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03474", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR then generalShader must be a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_RAYGEN_BIT_KHR, VK_SHADER_STAGE_MISS_BIT_KHR, or VK_SHADER_STAGE_CALLABLE_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03475", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR then closestHitShader, anyHitShader, and intersectionShader must be VK_SHADER_UNUSED_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03476", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR then intersectionShader must be a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_INTERSECTION_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03477", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR then intersectionShader must be VK_SHADER_UNUSED_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-closestHitShader-03478", + "text": " closestHitShader must be either VK_SHADER_UNUSED_KHR or a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-anyHitShader-03479", + "text": " anyHitShader must be either VK_SHADER_UNUSED_KHR or a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_ANY_HIT_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-rayTracingShaderGroupHandleCaptureReplayMixed-03480", + "text": " If VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingShaderGroupHandleCaptureReplayMixed is VK_FALSE then pShaderGroupCaptureReplayHandle must not be provided if it has not been provided on a previous call to ray tracing pipeline creation" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-rayTracingShaderGroupHandleCaptureReplayMixed-03481", + "text": " If VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingShaderGroupHandleCaptureReplayMixed is VK_FALSE then the caller must guarantee that no ray tracing pipeline creation commands with pShaderGroupCaptureReplayHandle provided execute simultaneously with ray tracing pipeline creation commands without pShaderGroupCaptureReplayHandle provided" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-parameter", + "text": " type must be a valid 
VkRayTracingShaderGroupTypeKHR value" + } + ] + }, + "VkRayTracingPipelineInterfaceCreateInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkRayTracingPipelineInterfaceCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineInterfaceCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkGetRayTracingShaderGroupHandlesKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-firstGroup-02419", + "text": " The sum of firstGroup and groupCount must be less than the number of shader groups in pipeline." + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-dataSize-02420", + "text": " dataSize must be at least VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize {times} groupCount" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pData-parameter", + "text": " pData must be a valid pointer to an array of dataSize bytes" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-dataSize-arraylength", + "text": " dataSize must be greater than 0" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-parent", + "text": " pipeline must have been created, allocated, or retrieved from device" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_pipeline_library)": [ + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-03482", + "text": " pipeline must have not been created with VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" + } + ] + }, + "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-firstGroup-03483", + "text": " The sum of firstGroup and groupCount must be less than the number of shader groups in pipeline." + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-dataSize-03484", + "text": " dataSize must be at least VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleCaptureReplaySize {times} groupCount" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-rayTracingShaderGroupHandleCaptureReplay-03485", + "text": " VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingShaderGroupHandleCaptureReplay must be enabled to call this function." 
+ }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pData-parameter", + "text": " pData must be a valid pointer to an array of dataSize bytes" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-dataSize-arraylength", + "text": " dataSize must be greater than 0" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-parent", + "text": " pipeline must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCompileDeferredNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkCompileDeferredNV-pipeline-02237", "text": " pipeline must have been created with VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV" }, { @@ -7399,10 +8049,10 @@ "text": " pipelineStageCreationFeedbackCount must be greater than 0" } ], - "(VK_EXT_pipeline_creation_feedback)+(VK_NV_ray_tracing)": [ + "(VK_EXT_pipeline_creation_feedback)+(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ { "vuid": "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670", - "text": " When chained to VkRayTracingPipelineCreateInfoNV, VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount must equal VkRayTracingPipelineCreateInfoNV::stageCount" + "text": " When chained to VkRayTracingPipelineCreateInfoKHR, VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount must equal VkRayTracingPipelineCreateInfoKHR::stageCount" } ] }, @@ -9827,10 +10477,6 @@ "text": " If subresourceRange.levelCount is not VK_REMAINING_MIP_LEVELS, subresourceRange.baseMipLevel + subresourceRange.levelCount must be less than or equal to the mipLevels specified in VkImageCreateInfo when image was created" }, { - "vuid": "VUID-VkImageViewCreateInfo-image-01018", - "text": " If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, format must be compatible with the format used to create image, as defined in Format Compatibility Classes" - }, - { "vuid": "VUID-VkImageViewCreateInfo-image-01020", "text": " If image is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, @@ -9945,6 +10591,12 @@ "text": " If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created" } ], + "!(VK_VERSION_1_1,VK_KHR_maintenance2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-01018", + "text": " If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, format must be compatible with the format used to create image, as defined in Format Compatibility Classes" + } + ], "(VK_VERSION_1_1,VK_KHR_maintenance2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ { "vuid": "VUID-VkImageViewCreateInfo-image-01759", @@ -11031,7 +11683,7 @@ ] }, "vkCreateAccelerationStructureNV": { - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { "vuid": "VUID-vkCreateAccelerationStructureNV-device-parameter", "text": " device must be a valid VkDevice handle" @@ -11051,7 +11703,7 @@ ] }, "VkAccelerationStructureCreateInfoNV": { - 
"(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { "vuid": "VUID-VkAccelerationStructureCreateInfoNV-compactedSize-02421", "text": " If compactedSize is not 0 then both info.geometryCount and info.instanceCount must be 0" @@ -11071,7 +11723,7 @@ ] }, "VkAccelerationStructureInfoNV": { - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { "vuid": "VUID-VkAccelerationStructureInfoNV-geometryCount-02422", "text": " geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount" @@ -11097,6 +11749,10 @@ "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then the geometryType member of each geometry in pGeometries must be the same" }, { + "vuid": "VUID-VkAccelerationStructureInfoNV-flags-03486", + "text": " flags must be a valid combination of VkBuildAccelerationStructureFlagBitsNV values" + }, + { "vuid": "VUID-VkAccelerationStructureInfoNV-flags-02592", "text": " If flags has the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV bit set, then it must not have the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV bit set" }, @@ -11121,8 +11777,8 @@ "text": " type must be a valid VkAccelerationStructureTypeNV value" }, { - "vuid": "VUID-VkAccelerationStructureInfoNV-flags-parameter", - "text": " flags must be a valid combination of VkBuildAccelerationStructureFlagBitsNV values" + "vuid": "VUID-VkAccelerationStructureInfoNV-flags-zerobitmask", + "text": " flags must be 0" }, { "vuid": "VUID-VkAccelerationStructureInfoNV-pGeometries-parameter", @@ -11130,111 +11786,251 @@ } ] }, - "VkGeometryNV": { - "(VK_NV_ray_tracing)": [ + "vkCreateAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ { - "vuid": "VUID-VkGeometryNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_GEOMETRY_NV" + "vuid": "VUID-vkCreateAccelerationStructureKHR-rayTracing-03487", + "text": " The rayTracing or rayQuery feature must be enabled" }, { - "vuid": "VUID-VkGeometryNV-pNext-pNext", - "text": " pNext must be NULL" + "vuid": "VUID-vkCreateAccelerationStructureKHR-deviceAddress-03488", + "text": " If VkAccelerationStructureCreateInfoKHR::deviceAddress is not zero, the rayTracingAccelerationStructureCaptureReplay feature must be enabled" }, { - "vuid": "VUID-VkGeometryNV-geometryType-parameter", - "text": " geometryType must be a valid VkGeometryTypeNV value" + "vuid": "VUID-vkCreateAccelerationStructureKHR-device-03489", + "text": " If device was created with multiple physical devices, then the bufferDeviceAddressMultiDevice feature must be enabled" }, { - "vuid": "VUID-VkGeometryNV-geometry-parameter", - "text": " geometry must be a valid VkGeometryDataNV structure" + "vuid": "VUID-vkCreateAccelerationStructureKHR-device-parameter", + "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-VkGeometryNV-flags-parameter", - "text": " flags must be a valid combination of VkGeometryFlagBitsNV values" - } - ] - }, - "VkGeometryDataNV": { - "(VK_NV_ray_tracing)": [ + "vuid": "VUID-vkCreateAccelerationStructureKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkAccelerationStructureCreateInfoKHR structure" + }, { - "vuid": "VUID-VkGeometryDataNV-triangles-parameter", - "text": " triangles must be a valid VkGeometryTrianglesNV structure" + "vuid": "VUID-vkCreateAccelerationStructureKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer 
to a valid VkAllocationCallbacks structure" }, { - "vuid": "VUID-VkGeometryDataNV-aabbs-parameter", - "text": " aabbs must be a valid VkGeometryAABBNV structure" + "vuid": "VUID-vkCreateAccelerationStructureKHR-pAccelerationStructure-parameter", + "text": " pAccelerationStructure must be a valid pointer to a VkAccelerationStructureKHR handle" } ] }, - "VkGeometryTrianglesNV": { - "(VK_NV_ray_tracing)": [ + "VkAccelerationStructureCreateInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ { - "vuid": "VUID-VkGeometryTrianglesNV-vertexOffset-02428", - "text": " vertexOffset must be less than the size of vertexData" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-compactedSize-03490", + "text": " If compactedSize is not 0 then maxGeometryCount must be 0" }, { - "vuid": "VUID-VkGeometryTrianglesNV-vertexOffset-02429", - "text": " vertexOffset must be a multiple of the component size of vertexFormat" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03491", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR then maxGeometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxGeometryCount" }, { - "vuid": "VUID-VkGeometryTrianglesNV-vertexFormat-02430", - "text": " vertexFormat must be one of VK_FORMAT_R32G32B32_SFLOAT, VK_FORMAT_R32G32_SFLOAT, VK_FORMAT_R16G16B16_SFLOAT, VK_FORMAT_R16G16_SFLOAT, VK_FORMAT_R16G16_SNORM, or VK_FORMAT_R16G16B16_SNORM" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03492", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR then pGeometryInfos->pname:maxPrimitiveCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxInstanceCount" }, { - "vuid": "VUID-VkGeometryTrianglesNV-indexOffset-02431", - "text": " indexOffset must be less than the size of indexData" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-maxPrimitiveCount-03493", + "text": " The total number of triangles in all geometries must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxPrimitiveCount" }, { - "vuid": "VUID-VkGeometryTrianglesNV-indexOffset-02432", - "text": " indexOffset must be a multiple of the element size of indexType" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-maxPrimitiveCount-03494", + "text": " The total number of AABBs in all geometries must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxPrimitiveCount" }, { - "vuid": "VUID-VkGeometryTrianglesNV-indexType-02433", - "text": " indexType must be VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32, or VK_INDEX_TYPE_NONE_NV" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03495", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR and compactedSize is 0, maxGeometryCount must be 1" }, { - "vuid": "VUID-VkGeometryTrianglesNV-indexData-02434", - "text": " indexData must be VK_NULL_HANDLE if indexType is VK_INDEX_TYPE_NONE_NV" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03496", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR and compactedSize is 0, the geometryType member of elements of pGeometryInfos must be VK_GEOMETRY_TYPE_INSTANCES_KHR" }, { - "vuid": "VUID-VkGeometryTrianglesNV-indexData-02435", - "text": " indexData must be a valid VkBuffer handle if indexType is not VK_INDEX_TYPE_NONE_NV" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03497", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR and compactedSize is 0, the geometryType member of elements of 
pGeometryInfos must not be VK_GEOMETRY_TYPE_INSTANCES_KHR" }, { - "vuid": "VUID-VkGeometryTrianglesNV-indexCount-02436", - "text": " indexCount must be 0 if indexType is VK_INDEX_TYPE_NONE_NV" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03498", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR then the geometryType member of each geometry in pGeometryInfos must be the same" }, { - "vuid": "VUID-VkGeometryTrianglesNV-transformOffset-02437", - "text": " transformOffset must be less than the size of transformData" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-flags-03499", + "text": " If flags has the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR bit set, then it must not have the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR bit set" }, { - "vuid": "VUID-VkGeometryTrianglesNV-transformOffset-02438", - "text": " transformOffset must be a multiple of 16" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-deviceAddress-03500", + "text": " If deviceAddress is not 0, VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingAccelerationStructureCaptureReplay must be VK_TRUE" }, { - "vuid": "VUID-VkGeometryTrianglesNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR" }, { - "vuid": "VUID-VkGeometryTrianglesNV-pNext-pNext", + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-pNext-pNext", "text": " pNext must be NULL" }, { - "vuid": "VUID-VkGeometryTrianglesNV-vertexData-parameter", - "text": " If vertexData is not VK_NULL_HANDLE, vertexData must be a valid VkBuffer handle" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-parameter", + "text": " type must be a valid VkAccelerationStructureTypeKHR value" }, { - "vuid": "VUID-VkGeometryTrianglesNV-vertexFormat-parameter", - "text": " vertexFormat must be a valid VkFormat value" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkBuildAccelerationStructureFlagBitsKHR values" }, { - "vuid": "VUID-VkGeometryTrianglesNV-indexData-parameter", - "text": " If indexData is not VK_NULL_HANDLE, indexData must be a valid VkBuffer handle" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-pGeometryInfos-parameter", + "text": " pGeometryInfos must be a valid pointer to an array of maxGeometryCount valid VkAccelerationStructureCreateGeometryTypeInfoKHR structures" }, { - "vuid": "VUID-VkGeometryTrianglesNV-indexType-parameter", - "text": " indexType must be a valid VkIndexType value" + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-maxGeometryCount-arraylength", + "text": " maxGeometryCount must be greater than 0" + } + ] + }, + "VkAccelerationStructureCreateGeometryTypeInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-geometryType-03501", + "text": " If geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, vertexFormat must support the VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR in VkFormatProperties::bufferFeatures as returned by vkGetPhysicalDeviceFormatProperties2" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-geometryType-03502", + "text": " If geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, indexType must be VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32, or VK_INDEX_TYPE_NONE_KHR" + }, + { + "vuid": 
"VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-geometryType-parameter", + "text": " geometryType must be a valid VkGeometryTypeKHR value" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-indexType-parameter", + "text": " indexType must be a valid VkIndexType value" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-vertexFormat-parameter", + "text": " If vertexFormat is not 0, vertexFormat must be a valid VkFormat value" + } + ] + }, + "VkGeometryNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkGeometryNV-geometryType-03503", + "text": " geometryType must be VK_GEOMETRY_TYPE_TRIANGLES_NV or VK_GEOMETRY_TYPE_AABBS_NV" + }, + { + "vuid": "VUID-VkGeometryNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_GEOMETRY_NV" + }, + { + "vuid": "VUID-VkGeometryNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkGeometryNV-geometryType-parameter", + "text": " geometryType must be a valid VkGeometryTypeKHR value" + }, + { + "vuid": "VUID-VkGeometryNV-geometry-parameter", + "text": " geometry must be a valid VkGeometryDataNV structure" + }, + { + "vuid": "VUID-VkGeometryNV-flags-parameter", + "text": " flags must be a valid combination of VkGeometryFlagBitsKHR values" + } + ] + }, + "VkGeometryDataNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkGeometryDataNV-triangles-parameter", + "text": " triangles must be a valid VkGeometryTrianglesNV structure" + }, + { + "vuid": "VUID-VkGeometryDataNV-aabbs-parameter", + "text": " aabbs must be a valid VkGeometryAABBNV structure" + } + ] + }, + "VkGeometryTrianglesNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexOffset-02428", + "text": " vertexOffset must be less than the size of vertexData" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexOffset-02429", + "text": " vertexOffset must be a multiple of the component size of vertexFormat" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexFormat-02430", + "text": " vertexFormat must be one of VK_FORMAT_R32G32B32_SFLOAT, VK_FORMAT_R32G32_SFLOAT, VK_FORMAT_R16G16B16_SFLOAT, VK_FORMAT_R16G16_SFLOAT, VK_FORMAT_R16G16_SNORM, or VK_FORMAT_R16G16B16_SNORM" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexOffset-02431", + "text": " indexOffset must be less than the size of indexData" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexOffset-02432", + "text": " indexOffset must be a multiple of the element size of indexType" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexType-02433", + "text": " indexType must be VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32, or VK_INDEX_TYPE_NONE_NV" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexData-02434", + "text": " indexData must be VK_NULL_HANDLE if indexType is VK_INDEX_TYPE_NONE_NV" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexData-02435", + "text": " indexData must be a valid VkBuffer handle if indexType is not VK_INDEX_TYPE_NONE_NV" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexCount-02436", + "text": " indexCount must be 0 if indexType is VK_INDEX_TYPE_NONE_NV" + }, + { + "vuid": 
"VUID-VkGeometryTrianglesNV-transformOffset-02437", + "text": " transformOffset must be less than the size of transformData" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-transformOffset-02438", + "text": " transformOffset must be a multiple of 16" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexData-parameter", + "text": " If vertexData is not VK_NULL_HANDLE, vertexData must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexFormat-parameter", + "text": " vertexFormat must be a valid VkFormat value" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexData-parameter", + "text": " If indexData is not VK_NULL_HANDLE, indexData must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexType-parameter", + "text": " indexType must be a valid VkIndexType value" }, { "vuid": "VUID-VkGeometryTrianglesNV-transformData-parameter", @@ -11247,7 +12043,7 @@ ] }, "VkGeometryAABBNV": { - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { "vuid": "VUID-VkGeometryAABBNV-offset-02439", "text": " offset must be less than the size of aabbData" @@ -11274,40 +12070,40 @@ } ] }, - "vkDestroyAccelerationStructureNV": { - "(VK_NV_ray_tracing)": [ + "vkDestroyAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ { - "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02442", + "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02442", "text": " All submitted commands that refer to accelerationStructure must have completed execution" }, { - "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02443", + "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02443", "text": " If VkAllocationCallbacks were provided when accelerationStructure was created, a compatible set of callbacks must be provided here" }, { - "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02444", + "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02444", "text": " If no VkAllocationCallbacks were provided when accelerationStructure was created, pAllocator must be NULL" }, { - "vuid": "VUID-vkDestroyAccelerationStructureNV-device-parameter", + "vuid": "VUID-vkDestroyAccelerationStructureKHR-device-parameter", "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parameter", - "text": " accelerationStructure must be a valid VkAccelerationStructureNV handle" + "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-parameter", + "text": " accelerationStructure must be a valid VkAccelerationStructureKHR handle" }, { - "vuid": "VUID-vkDestroyAccelerationStructureNV-pAllocator-parameter", + "vuid": "VUID-vkDestroyAccelerationStructureKHR-pAllocator-parameter", "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" }, { - "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parent", + "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-parent", "text": " accelerationStructure must have been created, allocated, or retrieved from device" } ] }, "vkGetAccelerationStructureMemoryRequirementsNV": { - "(VK_NV_ray_tracing)": [ + 
"(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsNV-device-parameter", "text": " device must be a valid VkDevice handle" @@ -11323,7 +12119,7 @@ ] }, "VkAccelerationStructureMemoryRequirementsInfoNV": { - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV" @@ -11342,79 +12138,119 @@ } ] }, - "vkBindAccelerationStructureMemoryNV": { - "(VK_NV_ray_tracing)": [ + "vkGetAccelerationStructureMemoryRequirementsKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, { - "vuid": "VUID-vkBindAccelerationStructureMemoryNV-device-parameter", + "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureMemoryRequirementsInfoKHR structure" + }, + { + "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsKHR-pMemoryRequirements-parameter", + "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2 structure" + } + ] + }, + "VkAccelerationStructureMemoryRequirementsInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoKHR-type-parameter", + "text": " type must be a valid VkAccelerationStructureMemoryRequirementsTypeKHR value" + }, + { + "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoKHR-buildType-parameter", + "text": " buildType must be a valid VkAccelerationStructureBuildTypeKHR value" + }, + { + "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoKHR-accelerationStructure-parameter", + "text": " accelerationStructure must be a valid VkAccelerationStructureKHR handle" + } + ] + }, + "vkBindAccelerationStructureMemoryKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkBindAccelerationStructureMemoryKHR-device-parameter", "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-vkBindAccelerationStructureMemoryNV-pBindInfos-parameter", - "text": " pBindInfos must be a valid pointer to an array of bindInfoCount valid VkBindAccelerationStructureMemoryInfoNV structures" + "vuid": "VUID-vkBindAccelerationStructureMemoryKHR-pBindInfos-parameter", + "text": " pBindInfos must be a valid pointer to an array of bindInfoCount valid VkBindAccelerationStructureMemoryInfoKHR structures" }, { - "vuid": "VUID-vkBindAccelerationStructureMemoryNV-bindInfoCount-arraylength", + "vuid": "VUID-vkBindAccelerationStructureMemoryKHR-bindInfoCount-arraylength", "text": " bindInfoCount must be greater than 0" } ] }, - "VkBindAccelerationStructureMemoryInfoNV": { - "(VK_NV_ray_tracing)": [ + "VkBindAccelerationStructureMemoryInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-02450", + "vuid": 
"VUID-VkBindAccelerationStructureMemoryInfoKHR-accelerationStructure-02450", "text": " accelerationStructure must not already be backed by a memory object" }, { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02451", + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-memoryOffset-02451", "text": " memoryOffset must be less than the size of memory" }, { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-02593", - "text": " memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV" + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-memory-02593", + "text": " memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsKHR with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR" }, { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02594", - "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV" + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-memoryOffset-02594", + "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsKHR with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR" }, { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-size-02595", - "text": " The size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV must be less than or equal to the size of memory minus memoryOffset" + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-size-02595", + "text": " The size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsKHR with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR must be less than or equal to the size of memory minus memoryOffset" }, { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV" + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR" }, { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-pNext-pNext", + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-pNext-pNext", "text": " pNext must be NULL" }, { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-parameter", - "text": " accelerationStructure must be a valid VkAccelerationStructureNV handle" + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-accelerationStructure-parameter", + "text": " accelerationStructure must be a valid VkAccelerationStructureKHR handle" }, { - "vuid": 
"VUID-VkBindAccelerationStructureMemoryInfoNV-memory-parameter", + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-memory-parameter", "text": " memory must be a valid VkDeviceMemory handle" }, { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-pDeviceIndices-parameter", + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-pDeviceIndices-parameter", "text": " If deviceIndexCount is not 0, pDeviceIndices must be a valid pointer to an array of deviceIndexCount uint32_t values" }, { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-commonparent", + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-commonparent", "text": " Both of accelerationStructure, and memory must have been created, allocated, or retrieved from the same VkDevice" } ] }, "vkGetAccelerationStructureHandleNV": { - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { "vuid": "VUID-vkGetAccelerationStructureHandleNV-dataSize-02240", "text": " dataSize must be large enough to contain the result of the query, as described above" }, { "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-02787", - "text": " accelerationStructure must be bound completely and contiguously to a single VkDeviceMemory object via vkBindAccelerationStructureMemoryNV" + "text": " accelerationStructure must be bound completely and contiguously to a single VkDeviceMemory object via vkBindAccelerationStructureMemoryKHR" }, { "vuid": "VUID-vkGetAccelerationStructureHandleNV-device-parameter", @@ -11422,7 +12258,7 @@ }, { "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parameter", - "text": " accelerationStructure must be a valid VkAccelerationStructureNV handle" + "text": " accelerationStructure must be a valid VkAccelerationStructureKHR handle" }, { "vuid": "VUID-vkGetAccelerationStructureHandleNV-pData-parameter", @@ -11438,6 +12274,38 @@ } ] }, + "vkGetAccelerationStructureDeviceAddressKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkGetAccelerationStructureDeviceAddressKHR-device-03504", + "text": " If device was created with multiple physical devices, then the bufferDeviceAddressMultiDevice feature must be enabled" + }, + { + "vuid": "VUID-vkGetAccelerationStructureDeviceAddressKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetAccelerationStructureDeviceAddressKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureDeviceAddressInfoKHR structure" + } + ] + }, + "VkAccelerationStructureDeviceAddressInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureDeviceAddressInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureDeviceAddressInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureDeviceAddressInfoKHR-accelerationStructure-parameter", + "text": " accelerationStructure must be a valid VkAccelerationStructureKHR handle" + } + ] + }, "vkCreateSampler": { "core": [ { @@ -11705,6 +12573,10 @@ { "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01649", "text": " format must not be VK_FORMAT_UNDEFINED" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01653", + "text": " If an external format conversion is not being created, format must represent unsigned normalized 
values (i.e. the format must be a UNORM format)" } ], "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ @@ -11727,10 +12599,6 @@ "text": " If the format does not support VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, xChromaOffset and yChromaOffset must not be VK_CHROMA_LOCATION_MIDPOINT" }, { - "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01653", - "text": " format must represent unsigned normalized values (i.e. the format must be a UNORM format)" - }, - { "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-components-02581", "text": " If the format has a _422 or _420 suffix, then components.g must be VK_COMPONENT_SWIZZLE_IDENTITY" }, @@ -12333,10 +13201,10 @@ "text": " pSetLayouts must not contain more than one descriptor set layout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR set" } ], - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ { "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-02381", - "text": " The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxDescriptorSetAccelerationStructures" + "text": " The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxDescriptorSetAccelerationStructures" } ] }, @@ -12751,18 +13619,6 @@ "text": " If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView and imageLayout members of each element of pImageInfo must be a valid VkImageView and VkImageLayout, respectively" }, { - "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01946", - "text": " If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, then the imageView member of each pImageInfo element must have been created without a VkSamplerYcbcrConversionInfo structure in its pNext chain" - }, - { - "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02738", - "text": " If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and if any element of pImageInfo has a imageView member that was created with a VkSamplerYcbcrConversionInfo structure in its pNext chain, then dstSet must have been allocated with a layout that included immutable samplers for dstBinding, and the corresponding immutable sampler must have been created with an identically defined VkSamplerYcbcrConversionInfo object" - }, - { - "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01948", - "text": " If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and dstSet was allocated with a layout that included immutable samplers for dstBinding, then the imageView member of each element of pImageInfo which corresponds to an immutable sampler that enables sampler {YCbCr} conversion must have been created with a VkSamplerYcbcrConversionInfo structure in its pNext chain with an identically defined VkSamplerYcbcrConversionInfo to the corresponding immutable sampler" - }, - { "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01402", "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, for each descriptor that will be accessed via load or store operations the imageLayout member for 
corresponding elements of pImageInfo must be VK_IMAGE_LAYOUT_GENERAL" }, @@ -12832,7 +13688,7 @@ }, { "vuid": "VUID-VkWriteDescriptorSet-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkWriteDescriptorSetAccelerationStructureNV or VkWriteDescriptorSetInlineUniformBlockEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkWriteDescriptorSetAccelerationStructureKHR or VkWriteDescriptorSetInlineUniformBlockEXT" }, { "vuid": "VUID-VkWriteDescriptorSet-sType-unique", @@ -12865,10 +13721,24 @@ "text": " If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, the pNext chain must include a VkWriteDescriptorSetInlineUniformBlockEXT structure whose dataSize member equals descriptorCount" } ], - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ { "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02382", - "text": " If descriptorType is VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, the pNext chain must include a VkWriteDescriptorSetAccelerationStructureNV structure whose accelerationStructureCount member equals descriptorCount" + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, the pNext chain must include a VkWriteDescriptorSetAccelerationStructureKHR structure whose accelerationStructureCount member equals descriptorCount" + } + ], + "(VK_VULKAN_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01946", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, then the imageView member of each pImageInfo element must have been created without a VkSamplerYcbcrConversionInfo structure in its pNext chain" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02738", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and if any element of pImageInfo has a imageView member that was created with a VkSamplerYcbcrConversionInfo structure in its pNext chain, then dstSet must have been allocated with a layout that included immutable samplers for dstBinding, and the corresponding immutable sampler must have been created with an identically defined VkSamplerYcbcrConversionInfo object" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01948", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and dstSet was allocated with a layout that included immutable samplers for dstBinding, then the imageView member of each element of pImageInfo which corresponds to an immutable sampler that enables sampler {YCbCr} conversion must have been created with a VkSamplerYcbcrConversionInfo structure in its pNext chain with an identically defined VkSamplerYcbcrConversionInfo to the corresponding immutable sampler" } ], "(VK_VERSION_1_2,VK_EXT_descriptor_indexing)": [ @@ -12946,26 +13816,26 @@ } ] }, - "VkWriteDescriptorSetAccelerationStructureNV": { - "(VK_NV_ray_tracing)": [ + "VkWriteDescriptorSetAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ { - "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-02236", + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-accelerationStructureCount-02236", "text": " accelerationStructureCount must be equal to descriptorCount in the extended structure" }, { - "vuid": 
"VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-02764", - "text": " Each acceleration structure in pAccelerationStructures must have been created with VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV" + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-pAccelerationStructures-02764", + "text": " Each acceleration structure in pAccelerationStructures must have been created with VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR" }, { - "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV" + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR" }, { - "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-parameter", - "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureNV handles" + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-pAccelerationStructures-parameter", + "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureKHR handles" }, { - "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-arraylength", + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-accelerationStructureCount-arraylength", "text": " accelerationStructureCount must be greater than 0" } ] @@ -13522,7 +14392,7 @@ }, { "vuid": "VUID-VkQueryPoolCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkQueryPoolCreateInfoINTEL or VkQueryPoolPerformanceCreateInfoKHR" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkQueryPoolPerformanceCreateInfoKHR or VkQueryPoolPerformanceQueryCreateInfoINTEL" }, { "vuid": "VUID-VkQueryPoolCreateInfo-sType-unique", @@ -14346,14 +15216,14 @@ } ] }, - "VkQueryPoolCreateInfoINTEL": { + "VkQueryPoolPerformanceQueryCreateInfoINTEL": { "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [ { - "vuid": "VUID-VkQueryPoolCreateInfoINTEL-sType-sType", + "vuid": "VUID-VkQueryPoolPerformanceQueryCreateInfoINTEL-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL" }, { - "vuid": "VUID-VkQueryPoolCreateInfoINTEL-performanceCountersSampling-parameter", + "vuid": "VUID-VkQueryPoolPerformanceQueryCreateInfoINTEL-performanceCountersSampling-parameter", "text": " performanceCountersSampling must be a valid VkQueryPoolSamplingModeINTEL value" } ] @@ -15147,14 +16017,6 @@ "vkCmdCopyImage": { "core": [ { - "vuid": "VUID-vkCmdCopyImage-pRegions-00122", - "text": " The source region specified by each element of pRegions must be a region that is contained within srcImage" - }, - { - "vuid": "VUID-vkCmdCopyImage-pRegions-00123", - "text": " The destination region specified by each element of pRegions must be a region that is contained within dstImage" - }, - { "vuid": "VUID-vkCmdCopyImage-pRegions-00124", "text": " The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory" }, @@ -15287,10 +16149,6 @@ { "vuid": "VUID-vkCmdCopyImage-None-01549", "text": " In a copy to or from a plane of a multi-planar image, the VkFormat of 
the image and plane must be compatible according to the description of compatible planes for the plane being copied" - }, - { - "vuid": "VUID-vkCmdCopyImage-aspectMask-01550", - "text": " When a copy is performed to or from an image with a multi-planar format, the aspectMask of the srcSubresource and/or dstSubresource that refers to the multi-planar image must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (with VK_IMAGE_ASPECT_PLANE_2_BIT valid only for a VkFormat with three planes)" } ], "!(VK_KHR_shared_presentable_image)": [ @@ -16569,10 +17427,10 @@ "text": " Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice" } ], - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ { "vuid": "VUID-vkCmdBindIndexBuffer-indexType-02507", - "text": " indexType must not be VK_INDEX_TYPE_NONE_NV." + "text": " indexType must not be VK_INDEX_TYPE_NONE_KHR." } ], "(VK_EXT_index_type_uint8)": [ @@ -20748,711 +21606,473 @@ } ] }, - "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX": { - "(VK_NVX_device_generated_commands)": [ + "vkCreateIndirectCommandsLayoutNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-physicalDevice-parameter", - "text": " physicalDevice must be a valid VkPhysicalDevice handle" + "vuid": "VUID-vkCreateIndirectCommandsLayoutNV-deviceGeneratedCommands-02929", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNVdeviceGeneratedCommands feature must be enabled" }, { - "vuid": "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pFeatures-parameter", - "text": " pFeatures must be a valid pointer to a VkDeviceGeneratedCommandsFeaturesNVX structure" + "vuid": "VUID-vkCreateIndirectCommandsLayoutNV-device-parameter", + "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pLimits-parameter", - "text": " pLimits must be a valid pointer to a VkDeviceGeneratedCommandsLimitsNVX structure" - } - ] - }, - "VkDeviceGeneratedCommandsFeaturesNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-VkDeviceGeneratedCommandsFeaturesNVX-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX" + "vuid": "VUID-vkCreateIndirectCommandsLayoutNV-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkIndirectCommandsLayoutCreateInfoNV structure" }, { - "vuid": "VUID-VkDeviceGeneratedCommandsFeaturesNVX-pNext-pNext", - "text": " pNext must be NULL" - } - ] - }, - "VkDeviceGeneratedCommandsLimitsNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-VkDeviceGeneratedCommandsLimitsNVX-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX" + "vuid": "VUID-vkCreateIndirectCommandsLayoutNV-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" }, { - "vuid": "VUID-VkDeviceGeneratedCommandsLimitsNVX-pNext-pNext", - "text": " pNext must be NULL" + "vuid": "VUID-vkCreateIndirectCommandsLayoutNV-pIndirectCommandsLayout-parameter", + "text": " pIndirectCommandsLayout must be a valid pointer to a VkIndirectCommandsLayoutNV handle" } ] }, - "vkCreateObjectTableNVX": { - "(VK_NVX_device_generated_commands)": [ + "VkIndirectCommandsLayoutCreateInfoNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": 
"VUID-vkCreateObjectTableNVX-device-parameter", - "text": " device must be a valid VkDevice handle" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pipelineBindPoint-02930", + "text": " The pipelineBindPoint must be VK_PIPELINE_BIND_POINT_GRAPHICS" }, { - "vuid": "VUID-vkCreateObjectTableNVX-pCreateInfo-parameter", - "text": " pCreateInfo must be a valid pointer to a valid VkObjectTableCreateInfoNVX structure" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-tokenCount-02931", + "text": " tokenCount must be greater than 0 and less than or equal to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxIndirectCommandsTokenCount" }, { - "vuid": "VUID-vkCreateObjectTableNVX-pAllocator-parameter", - "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pTokens-02932", + "text": " If pTokens contains an entry of VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV it must be the first element of the array and there must be only a single element of such token type." }, { - "vuid": "VUID-vkCreateObjectTableNVX-pObjectTable-parameter", - "text": " pObjectTable must be a valid pointer to a VkObjectTableNVX handle" - } - ] - }, - "VkObjectTableCreateInfoNVX": { - "(VK_NVX_device_generated_commands)": [ + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pTokens-02933", + "text": " If pTokens contains an entry of VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV there must be only a single element of such token type." + }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-computeBindingPointSupport-01355", - "text": " If the VkDeviceGeneratedCommandsFeaturesNVX::computeBindingPointSupport feature is not enabled, pObjectEntryUsageFlags must not contain VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pTokens-02934", + "text": " All state tokens in pTokens must occur prior work provoking tokens (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV)." }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-01356", - "text": " Any value within pObjectEntryCounts must not exceed VkDeviceGeneratedCommandsLimitsNVX::maxObjectEntryCounts" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pTokens-02935", + "text": " The content of pTokens must include one single work provoking token that is compatible with the pipelineBindPoint." }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-maxUniformBuffersPerDescriptor-01357", - "text": " maxUniformBuffersPerDescriptor must be within the limits supported by the device." + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-streamCount-02936", + "text": " streamCount must be greater than 0 and less or equal to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxIndirectCommandsStreamCount" }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-maxStorageBuffersPerDescriptor-01358", - "text": " maxStorageBuffersPerDescriptor must be within the limits supported by the device." + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pStreamStrides-02937", + "text": " each element of pStreamStrides must be greater than `0`and less than or equal to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxIndirectCommandsStreamStride. Furthermore the alignment of each token input must be ensured." 
}, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-maxStorageImagesPerDescriptor-01359", - "text": " maxStorageImagesPerDescriptor must be within the limits supported by the device." + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV" }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-maxSampledImagesPerDescriptor-01360", - "text": " maxSampledImagesPerDescriptor must be within the limits supported by the device." + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pNext-pNext", + "text": " pNext must be NULL" }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-flags-parameter", + "text": " flags must be a valid combination of VkIndirectCommandsLayoutUsageFlagBitsNV values" }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-pNext-pNext", - "text": " pNext must be NULL" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-flags-requiredbitmask", + "text": " flags must not be 0" }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryTypes-parameter", - "text": " pObjectEntryTypes must be a valid pointer to an array of objectCount valid VkObjectEntryTypeNVX values" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pipelineBindPoint-parameter", + "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-parameter", - "text": " pObjectEntryCounts must be a valid pointer to an array of objectCount uint32_t values" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pTokens-parameter", + "text": " pTokens must be a valid pointer to an array of tokenCount valid VkIndirectCommandsLayoutTokenNV structures" }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryUsageFlags-parameter", - "text": " pObjectEntryUsageFlags must be a valid pointer to an array of objectCount valid combinations of VkObjectEntryUsageFlagBitsNVX values" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pStreamStrides-parameter", + "text": " pStreamStrides must be a valid pointer to an array of streamCount uint32_t values" }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryUsageFlags-requiredbitmask", - "text": " Each element of pObjectEntryUsageFlags must not be 0" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-tokenCount-arraylength", + "text": " tokenCount must be greater than 0" }, { - "vuid": "VUID-VkObjectTableCreateInfoNVX-objectCount-arraylength", - "text": " objectCount must be greater than 0" + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-streamCount-arraylength", + "text": " streamCount must be greater than 0" } ] }, - "vkDestroyObjectTableNVX": { - "(VK_NVX_device_generated_commands)": [ + "vkDestroyIndirectCommandsLayoutNV": { + "(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNV-indirectCommandsLayout-02938", + "text": " All submitted commands that refer to indirectCommandsLayout must have completed execution" + }, { - "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-01361", - "text": " All submitted commands that refer to objectTable must have completed execution." 
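
Several of the valid-usage entries in this area reduce to one prerequisite: the deviceGeneratedCommands feature has to be switched on when the device is created. A minimal sketch of that, assuming graphicsQueueFamily was selected elsewhere and with error handling omitted:

    #include <vulkan/vulkan.h>

    // Illustrative device creation with VK_NV_device_generated_commands enabled.
    VkDevice createDgcDevice(VkPhysicalDevice physicalDevice, uint32_t graphicsQueueFamily)
    {
        float priority = 1.0f;
        VkDeviceQueueCreateInfo queueInfo{};
        queueInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queueInfo.queueFamilyIndex = graphicsQueueFamily;
        queueInfo.queueCount       = 1;
        queueInfo.pQueuePriorities = &priority;

        VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV dgcFeatures{};
        dgcFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV;
        dgcFeatures.deviceGeneratedCommands = VK_TRUE;   // required by the feature VUs in this block

        const char* extensions[] = { VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME };

        VkDeviceCreateInfo createInfo{};
        createInfo.sType                   = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
        createInfo.pNext                   = &dgcFeatures;   // chain the feature struct
        createInfo.queueCreateInfoCount    = 1;
        createInfo.pQueueCreateInfos       = &queueInfo;
        createInfo.enabledExtensionCount   = 1;
        createInfo.ppEnabledExtensionNames = extensions;

        VkDevice device = VK_NULL_HANDLE;
        vkCreateDevice(physicalDevice, &createInfo, nullptr, &device);
        return device;
    }
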
+ "vuid": "VUID-vkDestroyIndirectCommandsLayoutNV-indirectCommandsLayout-02939", + "text": " If VkAllocationCallbacks were provided when indirectCommandsLayout was created, a compatible set of callbacks must be provided here" }, { - "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-01362", - "text": " If VkAllocationCallbacks were provided when objectTable was created, a compatible set of callbacks must be provided here." + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNV-indirectCommandsLayout-02940", + "text": " If no VkAllocationCallbacks were provided when indirectCommandsLayout was created, pAllocator must be NULL" }, { - "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-01363", - "text": " If no VkAllocationCallbacks were provided when objectTable was created, pAllocator must be NULL." + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNV-deviceGeneratedCommands-02941", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNVdeviceGeneratedCommands feature must be enabled" }, { - "vuid": "VUID-vkDestroyObjectTableNVX-device-parameter", + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNV-device-parameter", "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-parameter", - "text": " objectTable must be a valid VkObjectTableNVX handle" + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNV-indirectCommandsLayout-parameter", + "text": " indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNV handle" }, { - "vuid": "VUID-vkDestroyObjectTableNVX-pAllocator-parameter", + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNV-pAllocator-parameter", "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" }, { - "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-parent", - "text": " objectTable must have been created, allocated, or retrieved from device" + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNV-indirectCommandsLayout-parent", + "text": " indirectCommandsLayout must have been created, allocated, or retrieved from device" } ] }, - "vkRegisterObjectsNVX": { - "(VK_NVX_device_generated_commands)": [ + "VkIndirectCommandsStreamNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-vkRegisterObjectsNVX-pObjectTableEntry-01364", - "text": " The contents of pObjectTableEntry must yield plausible bindings supported by the device." + "vuid": "VUID-VkIndirectCommandsStreamNV-buffer-02942", + "text": " The buffer’s usage flag must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set." }, { - "vuid": "VUID-vkRegisterObjectsNVX-pObjectIndices-01365", - "text": " At any pObjectIndices there must not be a registered resource already." + "vuid": "VUID-VkIndirectCommandsStreamNV-offset-02943", + "text": " The offset must be aligned to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::minIndirectCommandsBufferOffsetAlignment." }, { - "vuid": "VUID-vkRegisterObjectsNVX-pObjectIndices-01366", - "text": " Any value inside pObjectIndices must be below the appropriate VkObjectTableCreateInfoNVX::pObjectEntryCounts limits provided at objectTable creation time." 
- }, + "vuid": "VUID-VkIndirectCommandsStreamNV-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + } + ] + }, + "VkBindShaderGroupIndirectCommandNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-vkRegisterObjectsNVX-device-parameter", - "text": " device must be a valid VkDevice handle" + "vuid": "VUID-VkBindShaderGroupIndirectCommandNV-None-02944", + "text": " The current bound graphics pipeline, as well as the pipelines it may reference, must have been created with VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" }, { - "vuid": "VUID-vkRegisterObjectsNVX-objectTable-parameter", - "text": " objectTable must be a valid VkObjectTableNVX handle" - }, + "vuid": "VUID-VkBindShaderGroupIndirectCommandNV-index-02945", + "text": " The index must be within range of the accessible shader groups of the current bound graphics pipeline. See vkCmdBindPipelineShaderGroupNV for further details." + } + ] + }, + "VkBindIndexBufferIndirectCommandNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-vkRegisterObjectsNVX-ppObjectTableEntries-parameter", - "text": " ppObjectTableEntries must be a valid pointer to an array of objectCount valid VkObjectTableEntryNVX structures" + "vuid": "VUID-VkBindIndexBufferIndirectCommandNV-None-02946", + "text": " The buffer’s usage flag from which the address was acquired must have the VK_BUFFER_USAGE_INDEX_BUFFER_BIT bit set." }, { - "vuid": "VUID-vkRegisterObjectsNVX-pObjectIndices-parameter", - "text": " pObjectIndices must be a valid pointer to an array of objectCount uint32_t values" + "vuid": "VUID-VkBindIndexBufferIndirectCommandNV-bufferAddress-02947", + "text": " The bufferAddress must be aligned to the indexType used." }, { - "vuid": "VUID-vkRegisterObjectsNVX-objectCount-arraylength", - "text": " objectCount must be greater than 0" + "vuid": "VUID-VkBindIndexBufferIndirectCommandNV-None-02948", + "text": " Each element of the buffer from which the address was acquired and that is non-sparse must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkRegisterObjectsNVX-objectTable-parent", - "text": " objectTable must have been created, allocated, or retrieved from device" + "vuid": "VUID-VkBindIndexBufferIndirectCommandNV-indexType-parameter", + "text": " indexType must be a valid VkIndexType value" } ] }, - "VkObjectTableEntryNVX": { - "(VK_NVX_device_generated_commands)": [ + "VkBindVertexBufferIndirectCommandNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-VkObjectTableEntryNVX-computeBindingPointSupport-01367", - "text": " If the VkDeviceGeneratedCommandsFeaturesNVX::computeBindingPointSupport feature is not enabled, flags must not contain VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX" + "vuid": "VUID-VkBindVertexBufferIndirectCommandNV-None-02949", + "text": " The buffer’s usage flag from which the address was acquired must have the VK_BUFFER_USAGE_VERTEX_BUFFER_BIT bit set." 
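
For the address-based bind tokens, the data written into a stream is just a small plain struct. A sketch for the index-buffer case, assuming indexBuffer was created with both VK_BUFFER_USAGE_INDEX_BUFFER_BIT (per the usage VU above) and VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, is fully bound to memory, and that the bufferDeviceAddress feature is enabled; a UINT32 index type keeps the address alignment requirement easy to satisfy:

    #include <vulkan/vulkan.h>

    // Illustrative helper: fill VkBindIndexBufferIndirectCommandNV from a buffer address.
    VkBindIndexBufferIndirectCommandNV makeIndexToken(VkDevice device, VkBuffer indexBuffer,
                                                      uint32_t indexDataBytes)
    {
        VkBufferDeviceAddressInfo addrInfo{};
        addrInfo.sType  = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO;
        addrInfo.buffer = indexBuffer;

        VkBindIndexBufferIndirectCommandNV cmd{};
        cmd.bufferAddress = vkGetBufferDeviceAddress(device, &addrInfo);  // must be aligned to the index type
        cmd.size          = indexDataBytes;
        cmd.indexType     = VK_INDEX_TYPE_UINT32;
        return cmd;
    }
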
}, { - "vuid": "VUID-VkObjectTableEntryNVX-type-parameter", - "text": " type must be a valid VkObjectEntryTypeNVX value" - }, - { - "vuid": "VUID-VkObjectTableEntryNVX-flags-parameter", - "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" - }, - { - "vuid": "VUID-VkObjectTableEntryNVX-flags-requiredbitmask", - "text": " flags must not be 0" + "vuid": "VUID-VkBindVertexBufferIndirectCommandNV-None-02950", + "text": " Each element of the buffer from which the address was acquired and that is non-sparse must be bound completely and contiguously to a single VkDeviceMemory object" } ] }, - "VkObjectTablePipelineEntryNVX": { - "(VK_NVX_device_generated_commands)": [ + "VkIndirectCommandsLayoutTokenNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-VkObjectTablePipelineEntryNVX-type-01368", - "text": " type must be VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-stream-02951", + "text": " stream must be smaller than VkIndirectCommandsLayoutCreateInfoNV::streamCount." }, { - "vuid": "VUID-VkObjectTablePipelineEntryNVX-type-parameter", - "text": " type must be a valid VkObjectEntryTypeNVX value" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-offset-02952", + "text": " offset must be less than or equal to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxIndirectCommandsTokenOffset." }, { - "vuid": "VUID-VkObjectTablePipelineEntryNVX-flags-parameter", - "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-vertexBindingUnit-02953", + "text": " vertexBindingUnit must stay within device supported limits for the appropriate commands." }, { - "vuid": "VUID-VkObjectTablePipelineEntryNVX-flags-requiredbitmask", - "text": " flags must not be 0" - }, - { - "vuid": "VUID-VkObjectTablePipelineEntryNVX-pipeline-parameter", - "text": " pipeline must be a valid VkPipeline handle" - } - ] - }, - "VkObjectTableDescriptorSetEntryNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-type-01369", - "text": " type must be VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-pushconstantOffset-02954", + "text": " pushconstantOffset must stay within device supported limits for the appropriate commands." }, { - "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-type-parameter", - "text": " type must be a valid VkObjectEntryTypeNVX value" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-pushconstantSize-02955", + "text": " pushconstantSize must stay within device supported limits for the appropriate commands." }, { - "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-flags-parameter", - "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-indirectStateFlags-02956", + "text": " indirectStateFlags must not be ´0´." 
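
As an illustration of the pushconstant* members referenced in these entries, a sketch of a push-constant token; pipelineLayout is assumed to exist and to declare a matching push constant range, and the vec4-sized payload is only an example:

    #include <vulkan/vulkan.h>

    // Illustrative push-constant token: one vec4 written per generated sequence.
    VkIndirectCommandsLayoutTokenNV makePushConstantToken(VkPipelineLayout pipelineLayout)
    {
        VkIndirectCommandsLayoutTokenNV token{};
        token.sType                        = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV;
        token.tokenType                    = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV;
        token.stream                       = 0;
        token.offset                       = 0;
        token.pushconstantPipelineLayout   = pipelineLayout;
        token.pushconstantShaderStageFlags = VK_SHADER_STAGE_VERTEX_BIT;
        token.pushconstantOffset           = 0;
        token.pushconstantSize             = 4 * sizeof(float);  // offset/size must fit the layout's range and device limits
        return token;
    }
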
}, { - "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-flags-requiredbitmask", - "text": " flags must not be 0" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV" }, { - "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-pipelineLayout-parameter", - "text": " pipelineLayout must be a valid VkPipelineLayout handle" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-pNext-pNext", + "text": " pNext must be NULL" }, { - "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-descriptorSet-parameter", - "text": " descriptorSet must be a valid VkDescriptorSet handle" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-tokenType-parameter", + "text": " tokenType must be a valid VkIndirectCommandsTokenTypeNV value" }, { - "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-commonparent", - "text": " Both of descriptorSet, and pipelineLayout must have been created, allocated, or retrieved from the same VkDevice" - } - ] - }, - "VkObjectTableVertexBufferEntryNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-type-01370", - "text": " type must be VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-pushconstantPipelineLayout-parameter", + "text": " If pushconstantPipelineLayout is not VK_NULL_HANDLE, pushconstantPipelineLayout must be a valid VkPipelineLayout handle" }, { - "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-type-parameter", - "text": " type must be a valid VkObjectEntryTypeNVX value" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-pushconstantShaderStageFlags-parameter", + "text": " pushconstantShaderStageFlags must be a valid combination of VkShaderStageFlagBits values" }, { - "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-flags-parameter", - "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-indirectStateFlags-parameter", + "text": " indirectStateFlags must be a valid combination of VkIndirectStateFlagBitsNV values" }, { - "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-flags-requiredbitmask", - "text": " flags must not be 0" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-pIndexTypes-parameter", + "text": " If indexTypeCount is not 0, pIndexTypes must be a valid pointer to an array of indexTypeCount valid VkIndexType values" }, { - "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-buffer-parameter", - "text": " buffer must be a valid VkBuffer handle" + "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-pIndexTypeValues-parameter", + "text": " If indexTypeCount is not 0, pIndexTypeValues must be a valid pointer to an array of indexTypeCount uint32_t values" } ] }, - "VkObjectTableIndexBufferEntryNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-type-01371", - "text": " type must be VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX" - }, - { - "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-indexType-02783", - "text": " indexType must be VK_INDEX_TYPE_UINT16, or VK_INDEX_TYPE_UINT32" - }, - { - "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-type-parameter", - "text": " type must be a valid VkObjectEntryTypeNVX value" - }, + "vkGetGeneratedCommandsMemoryRequirementsNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-flags-parameter", - "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" + "vuid": 
"VUID-vkGetGeneratedCommandsMemoryRequirementsNV-deviceGeneratedCommands-02906", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNVdeviceGeneratedCommands feature must be enabled" }, { - "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-flags-requiredbitmask", - "text": " flags must not be 0" + "vuid": "VUID-vkGetGeneratedCommandsMemoryRequirementsNV-device-parameter", + "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-buffer-parameter", - "text": " buffer must be a valid VkBuffer handle" + "vuid": "VUID-vkGetGeneratedCommandsMemoryRequirementsNV-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkGeneratedCommandsMemoryRequirementsInfoNV structure" }, { - "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-indexType-parameter", - "text": " indexType must be a valid VkIndexType value" + "vuid": "VUID-vkGetGeneratedCommandsMemoryRequirementsNV-pMemoryRequirements-parameter", + "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2 structure" } ] }, - "VkObjectTablePushConstantEntryNVX": { - "(VK_NVX_device_generated_commands)": [ + "VkGeneratedCommandsMemoryRequirementsInfoNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-VkObjectTablePushConstantEntryNVX-type-01372", - "text": " type must be VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX" + "vuid": "VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-maxSequencesCount-02907", + "text": " maxSequencesCount must be less or equal to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxIndirectSequenceCount" }, { - "vuid": "VUID-VkObjectTablePushConstantEntryNVX-type-parameter", - "text": " type must be a valid VkObjectEntryTypeNVX value" + "vuid": "VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV" }, { - "vuid": "VUID-VkObjectTablePushConstantEntryNVX-flags-parameter", - "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" - }, - { - "vuid": "VUID-VkObjectTablePushConstantEntryNVX-flags-requiredbitmask", - "text": " flags must not be 0" - }, - { - "vuid": "VUID-VkObjectTablePushConstantEntryNVX-pipelineLayout-parameter", - "text": " pipelineLayout must be a valid VkPipelineLayout handle" - }, - { - "vuid": "VUID-VkObjectTablePushConstantEntryNVX-stageFlags-parameter", - "text": " stageFlags must be a valid combination of VkShaderStageFlagBits values" - }, - { - "vuid": "VUID-VkObjectTablePushConstantEntryNVX-stageFlags-requiredbitmask", - "text": " stageFlags must not be 0" + "vuid": "VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-pNext-pNext", + "text": " pNext must be NULL" } ] }, - "vkUnregisterObjectsNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-vkUnregisterObjectsNVX-pObjectIndices-01373", - "text": " At any pObjectIndices there must be a registered resource already." - }, - { - "vuid": "VUID-vkUnregisterObjectsNVX-pObjectEntryTypes-01374", - "text": " The pObjectEntryTypes of the resource at pObjectIndices must match." - }, + "vkCmdExecuteGeneratedCommandsNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-vkUnregisterObjectsNVX-None-01375", - "text": " All operations on the device using the registered resource must have been completed." 
+ "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-isPreprocessed-02908", + "text": " If isPreprocessed is VK_TRUE then vkCmdPreprocessGeneratedCommandsNV must have already been executed on the device, using the same pGeneratedCommandsInfo content as well as the content of the input buffers it references (all except VkGeneratedCommandsInfoNV::preprocessBuffer). Furthermore pGeneratedCommandsInfo`s indirectCommandsLayout must have been created with the VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV bit set." }, { - "vuid": "VUID-vkUnregisterObjectsNVX-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkUnregisterObjectsNVX-objectTable-parameter", - "text": " objectTable must be a valid VkObjectTableNVX handle" - }, - { - "vuid": "VUID-vkUnregisterObjectsNVX-pObjectEntryTypes-parameter", - "text": " pObjectEntryTypes must be a valid pointer to an array of objectCount valid VkObjectEntryTypeNVX values" + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-pipeline-02909", + "text": " VkGeneratedCommandsInfoNV::pipeline must match the current bound pipeline at VkGeneratedCommandsInfoNV::pipelineBindPoint." }, { - "vuid": "VUID-vkUnregisterObjectsNVX-pObjectIndices-parameter", - "text": " pObjectIndices must be a valid pointer to an array of objectCount uint32_t values" + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-deviceGeneratedCommands-02911", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNVdeviceGeneratedCommands feature must be enabled" }, { - "vuid": "VUID-vkUnregisterObjectsNVX-objectCount-arraylength", - "text": " objectCount must be greater than 0" + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { - "vuid": "VUID-vkUnregisterObjectsNVX-objectTable-parent", - "text": " objectTable must have been created, allocated, or retrieved from device" - } - ] - }, - "VkIndirectCommandsLayoutTokenNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-bindingUnit-01342", - "text": " bindingUnit must stay within device supported limits for the appropriate commands." + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-pGeneratedCommandsInfo-parameter", + "text": " pGeneratedCommandsInfo must be a valid pointer to a valid VkGeneratedCommandsInfoNV structure" }, { - "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-dynamicCount-01343", - "text": " dynamicCount must stay within device supported limits for the appropriate commands." + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" }, { - "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-divisor-01344", - "text": " divisor must be greater than 0 and a power of two." + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" }, { - "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-tokenType-parameter", - "text": " tokenType must be a valid VkIndirectCommandsTokenTypeNVX value" + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-renderpass", + "text": " This command must only be called inside of a render pass instance" } - ] - }, - "VkIndirectCommandsTokenNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-VkIndirectCommandsTokenNVX-buffer-01345", - "text": " The buffer’s usage flag must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set." 
- }, - { - "vuid": "VUID-VkIndirectCommandsTokenNVX-offset-01346", - "text": " The offset must be aligned to VkDeviceGeneratedCommandsLimitsNVX::minCommandsTokenBufferOffsetAlignment." - }, - { - "vuid": "VUID-VkIndirectCommandsTokenNVX-tokenType-parameter", - "text": " tokenType must be a valid VkIndirectCommandsTokenTypeNVX value" - }, + ], + "(VK_NV_device_generated_commands)+(VK_EXT_transform_feedback)": [ { - "vuid": "VUID-VkIndirectCommandsTokenNVX-buffer-parameter", - "text": " buffer must be a valid VkBuffer handle" + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02910", + "text": " Transform feedback must not be active." } ] }, - "vkCreateIndirectCommandsLayoutNVX": { - "(VK_NVX_device_generated_commands)": [ + "VkGeneratedCommandsInfoNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-pCreateInfo-parameter", - "text": " pCreateInfo must be a valid pointer to a valid VkIndirectCommandsLayoutCreateInfoNVX structure" - }, - { - "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-pAllocator-parameter", - "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + "vuid": "VUID-VkGeneratedCommandsInfoNV-pipeline-02912", + "text": " The provided pipeline must match the pipeline bound at execution time." }, { - "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-pIndirectCommandsLayout-parameter", - "text": " pIndirectCommandsLayout must be a valid pointer to a VkIndirectCommandsLayoutNVX handle" - } - ] - }, - "VkIndirectCommandsLayoutCreateInfoNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-01347", - "text": " tokenCount must be greater than 0 and below VkDeviceGeneratedCommandsLimitsNVX::maxIndirectCommandsLayoutTokenCount" + "vuid": "VUID-VkGeneratedCommandsInfoNV-indirectCommandsLayout-02913", + "text": " If the indirectCommandsLayout uses a token of VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, then the pipeline must have been created with multiple shader groups." }, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-computeBindingPointSupport-01348", - "text": " If the VkDeviceGeneratedCommandsFeaturesNVX::computeBindingPointSupport feature is not enabled, then pipelineBindPoint must not be VK_PIPELINE_BIND_POINT_COMPUTE" + "vuid": "VUID-VkGeneratedCommandsInfoNV-indirectCommandsLayout-02914", + "text": " If the indirectCommandsLayout uses a token of VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, then the pipeline must have been created with VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV set in VkGraphicsPipelineCreateInfo::flags." }, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01349", - "text": " If pTokens contains an entry of VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX it must be the first element of the array and there must be only a single element of such token type." + "vuid": "VUID-VkGeneratedCommandsInfoNV-indirectCommandsLayout-02915", + "text": " If the indirectCommandsLayout uses a token of VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV, then the pipeline’s VkPipelineLayout must match the VkIndirectCommandsLayoutTokenNV::pushconstantPipelineLayout."
}, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01350", - "text": " All state binding tokens in pTokens must occur prior work provoking tokens (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX)." + "vuid": "VUID-VkGeneratedCommandsInfoNV-streamCount-02916", + "text": " streamCount must match the indirectCommandsLayout’s streamCount" }, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01351", - "text": " The content of pTokens must include one single work provoking token that is compatible with the pipelineBindPoint." + "vuid": "VUID-VkGeneratedCommandsInfoNV-sequencesCount-02917", + "text": " sequencesCount must be less than or equal to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxIndirectSequenceCount and VkGeneratedCommandsMemoryRequirementsInfoNV::maxSequencesCount that was used to determine the preprocessSize." }, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX" + "vuid": "VUID-VkGeneratedCommandsInfoNV-preprocessBuffer-02918", + "text": " preprocessBuffer must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set in its usage flag." }, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pNext-pNext", - "text": " pNext must be NULL" + "vuid": "VUID-VkGeneratedCommandsInfoNV-preprocessOffset-02919", + "text": " preprocessOffset must be aligned to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::minIndirectCommandsBufferOffsetAlignment" }, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pipelineBindPoint-parameter", - "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" + "vuid": "VUID-VkGeneratedCommandsInfoNV-preprocessSize-02920", + "text": " preprocessSize must be at least equal to the memory requirement’s size returned by vkGetGeneratedCommandsMemoryRequirementsNV using the matching inputs (indirectCommandsLayout, …​) as within this structure." }, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-parameter", - "text": " flags must be a valid combination of VkIndirectCommandsLayoutUsageFlagBitsNVX values" + "vuid": "VUID-VkGeneratedCommandsInfoNV-sequencesCountBuffer-02921", + "text": " sequencesCountBuffer can be set if the actual used count of sequences is sourced from the provided buffer. In that case the sequencesCount serves as upper bound."
}, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-requiredbitmask", - "text": " flags must not be 0" + "vuid": "VUID-VkGeneratedCommandsInfoNV-sequencesCountBuffer-02922", + "text": " If sequencesCountBuffer is used, its usage flag must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" }, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-parameter", - "text": " pTokens must be a valid pointer to an array of tokenCount valid VkIndirectCommandsLayoutTokenNVX structures" + "vuid": "VUID-VkGeneratedCommandsInfoNV-sequencesCountBuffer-02923", + "text": " If sequencesCountBuffer is used, sequencesCountOffset must be aligned to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::minSequencesCountBufferOffsetAlignment" }, { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-arraylength", - "text": " tokenCount must be greater than 0" - } - ] - }, - "vkDestroyIndirectCommandsLayoutNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-01352", - "text": " All submitted commands that refer to indirectCommandsLayout must have completed execution" + "vuid": "VUID-VkGeneratedCommandsInfoNV-sequencesIndexBuffer-02924", + "text": " sequencesIndexBuffer must be set if indirectCommandsLayout’s VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV is set, otherwise it must be VK_NULL_HANDLE." }, { - "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-objectTable-01353", - "text": " If VkAllocationCallbacks were provided when objectTable was created, a compatible set of callbacks must be provided here" + "vuid": "VUID-VkGeneratedCommandsInfoNV-sequencesIndexBuffer-02925", + "text": " If sequencesIndexBuffer is used, its usage flag must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" }, { - "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-objectTable-01354", - "text": " If no VkAllocationCallbacks were provided when objectTable was created, pAllocator must be NULL" + "vuid": "VUID-VkGeneratedCommandsInfoNV-sequencesIndexBuffer-02926", + "text": " If sequencesIndexBuffer is used, sequencesIndexOffset must be aligned to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::minSequencesIndexBufferOffsetAlignment" }, { - "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-device-parameter", - "text": " device must be a valid VkDevice handle" + "vuid": "VUID-VkGeneratedCommandsInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV" }, { - "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parameter", - "text": " indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNVX handle" + "vuid": "VUID-VkGeneratedCommandsInfoNV-pNext-pNext", + "text": " pNext must be NULL" }, { - "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-pAllocator-parameter", - "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + "vuid": "VUID-VkGeneratedCommandsInfoNV-pipelineBindPoint-parameter", + "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" }, { - "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parent", - "text": " indirectCommandsLayout must have been created, allocated, or retrieved from device" - } - ] - }, - "vkCmdReserveSpaceForCommandsNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-01329", - "text": " The provided commandBuffer must not have had a prior space reservation 
since its creation or the last reset." + "vuid": "VUID-VkGeneratedCommandsInfoNV-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" }, { - "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-01330", - "text": " The state of the commandBuffer must be legal to execute all commands within the sequence provided by the indirectCommandsLayout member of pProcessCommandsInfo." + "vuid": "VUID-VkGeneratedCommandsInfoNV-indirectCommandsLayout-parameter", + "text": " indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNV handle" }, { - "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" + "vuid": "VUID-VkGeneratedCommandsInfoNV-pStreams-parameter", + "text": " pStreams must be a valid pointer to an array of streamCount valid VkIndirectCommandsStreamNV structures" }, { - "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-pReserveSpaceInfo-parameter", - "text": " pReserveSpaceInfo must be a valid pointer to a valid VkCmdReserveSpaceForCommandsInfoNVX structure" + "vuid": "VUID-VkGeneratedCommandsInfoNV-preprocessBuffer-parameter", + "text": " preprocessBuffer must be a valid VkBuffer handle" }, { - "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-recording", - "text": " commandBuffer must be in the recording state" + "vuid": "VUID-VkGeneratedCommandsInfoNV-sequencesCountBuffer-parameter", + "text": " If sequencesCountBuffer is not VK_NULL_HANDLE, sequencesCountBuffer must be a valid VkBuffer handle" }, { - "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + "vuid": "VUID-VkGeneratedCommandsInfoNV-sequencesIndexBuffer-parameter", + "text": " If sequencesIndexBuffer is not VK_NULL_HANDLE, sequencesIndexBuffer must be a valid VkBuffer handle" }, { - "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-renderpass", - "text": " This command must only be called inside of a render pass instance" + "vuid": "VUID-VkGeneratedCommandsInfoNV-streamCount-arraylength", + "text": " streamCount must be greater than 0" }, { - "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-bufferlevel", - "text": " commandBuffer must be a secondary VkCommandBuffer" + "vuid": "VUID-VkGeneratedCommandsInfoNV-commonparent", + "text": " Each of indirectCommandsLayout, pipeline, preprocessBuffer, sequencesCountBuffer, and sequencesIndexBuffer that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" } ] }, - "VkCmdReserveSpaceForCommandsInfoNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX" - }, + "vkCmdPreprocessGeneratedCommandsNV": { + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-objectTable-parameter", - "text": " objectTable must be a valid VkObjectTableNVX handle" + "vuid": "VUID-vkCmdPreprocessGeneratedCommandsNV-pGeneratedCommandsInfo-02927", + "text": " pGeneratedCommandsInfo’s indirectCommandsLayout must have been created with the VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV bit set."
}, { - "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-indirectCommandsLayout-parameter", - "text": " indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNVX handle" + "vuid": "VUID-vkCmdPreprocessGeneratedCommandsNV-deviceGeneratedCommands-02928", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" }, { - "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-commonparent", - "text": " Both of indirectCommandsLayout, and objectTable must have been created, allocated, or retrieved from the same VkDevice" - } - ] - }, - "vkCmdProcessCommandsNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-vkCmdProcessCommandsNVX-commandBuffer-parameter", + "vuid": "VUID-vkCmdPreprocessGeneratedCommandsNV-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { - "vuid": "VUID-vkCmdProcessCommandsNVX-pProcessCommandsInfo-parameter", - "text": " pProcessCommandsInfo must be a valid pointer to a valid VkCmdProcessCommandsInfoNVX structure" + "vuid": "VUID-vkCmdPreprocessGeneratedCommandsNV-pGeneratedCommandsInfo-parameter", + "text": " pGeneratedCommandsInfo must be a valid pointer to a valid VkGeneratedCommandsInfoNV structure" }, { - "vuid": "VUID-vkCmdProcessCommandsNVX-commandBuffer-recording", + "vuid": "VUID-vkCmdPreprocessGeneratedCommandsNV-commandBuffer-recording", "text": " commandBuffer must be in the recording state" }, { - "vuid": "VUID-vkCmdProcessCommandsNVX-commandBuffer-cmdpool", + "vuid": "VUID-vkCmdPreprocessGeneratedCommandsNV-commandBuffer-cmdpool", "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" }, { - "vuid": "VUID-vkCmdProcessCommandsNVX-renderpass", - "text": " This command must only be called inside of a render pass instance" - } - ] - }, - "VkCmdProcessCommandsInfoNVX": { - "(VK_NVX_device_generated_commands)": [ - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-objectTable-01331", - "text": " The provided objectTable must include all objects referenced by the generation process" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-01332", - "text": " indirectCommandsTokenCount must match the indirectCommandsLayout’s tokenCount" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-tokenType-01333", - "text": " The tokenType member of each entry in the pIndirectCommandsTokens array must match the values used at creation time of indirectCommandsLayout" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01334", - "text": " If targetCommandBuffer is provided, it must have reserved command space" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01335", - "text": " If targetCommandBuffer is provided, the objectTable must match the reservation’s objectTable and must have had all referenced objects registered at reservation time" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01336", - "text": " If targetCommandBuffer is provided, the indirectCommandsLayout must match the reservation’s indirectCommandsLayout" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01337", - "text": " If targetCommandBuffer is provided, the maxSequencesCount must not exceed the reservation’s maxSequencesCount" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-01338", - "text": " If sequencesCountBuffer is used, its usage flag must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set"
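
(Editorial aside, not part of the patch: a sketch of the explicit preprocess/execute recording order that the vkCmdPreprocessGeneratedCommandsNV and vkCmdExecuteGeneratedCommandsNV rules above describe. The function name recordGeneratedCommands, the filled-in VkGeneratedCommandsInfoNV pointed to by info, and renderPassBegin are assumptions; the intermediate barrier is one plausible choice and the extension's synchronization guidance should be consulted for a real implementation.)

// Record explicit preprocessing followed by execution of device-generated
// commands (illustrative sketch only).
void recordGeneratedCommands(VkCommandBuffer cmd,
                             const VkGeneratedCommandsInfoNV* info,
                             const VkRenderPassBeginInfo* renderPassBegin)
{
    // Preprocessing must be recorded outside of a render pass instance
    // (VUID-vkCmdPreprocessGeneratedCommandsNV-renderpass), and info's
    // indirectCommandsLayout must have been created with
    // VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV (VUID 02927).
    vkCmdPreprocessGeneratedCommandsNV(cmd, info);

    // Make the preprocess results visible to the indirect execution
    // (assumed barrier; adjust stages/access to your use case).
    VkMemoryBarrier barrier = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
        .srcAccessMask = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV,
        .dstAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
    };
    vkCmdPipelineBarrier(cmd,
                         VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
                         VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
                         0, 1, &barrier, 0, NULL, 0, NULL);

    // Execution must happen inside a render pass instance
    // (VUID-vkCmdExecuteGeneratedCommandsNV-renderpass), and the bound pipeline
    // must match info->pipeline (VUID 02909).
    vkCmdBeginRenderPass(cmd, renderPassBegin, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, info->pipeline);
    vkCmdExecuteGeneratedCommandsNV(cmd, VK_TRUE /* isPreprocessed */, info);
    vkCmdEndRenderPass(cmd);
}
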
- }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-01339", - "text": " If sequencesCountBuffer is used, sequencesCountOffset must be aligned to VkDeviceGeneratedCommandsLimitsNVX::minSequenceCountBufferOffsetAlignment" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-01340", - "text": " If sequencesIndexBuffer is used, its usage flag must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-01341", - "text": " If sequencesIndexBuffer is used, sequencesIndexOffset must be aligned to VkDeviceGeneratedCommandsLimitsNVX::minSequenceIndexBufferOffsetAlignment" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-objectTable-parameter", - "text": " objectTable must be a valid VkObjectTableNVX handle" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsLayout-parameter", - "text": " indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNVX handle" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-pIndirectCommandsTokens-parameter", - "text": " pIndirectCommandsTokens must be a valid pointer to an array of indirectCommandsTokenCount valid VkIndirectCommandsTokenNVX structures" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-parameter", - "text": " If targetCommandBuffer is not NULL, targetCommandBuffer must be a valid VkCommandBuffer handle" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-parameter", - "text": " If sequencesCountBuffer is not VK_NULL_HANDLE, sequencesCountBuffer must be a valid VkBuffer handle" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-parameter", - "text": " If sequencesIndexBuffer is not VK_NULL_HANDLE, sequencesIndexBuffer must be a valid VkBuffer handle" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-arraylength", - "text": " indirectCommandsTokenCount must be greater than 0" - }, - { - "vuid": "VUID-VkCmdProcessCommandsInfoNVX-commonparent", - "text": " Each of indirectCommandsLayout, objectTable, sequencesCountBuffer, sequencesIndexBuffer, and targetCommandBuffer that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + "vuid": "VUID-vkCmdPreprocessGeneratedCommandsNV-renderpass", + "text": " This command must only be called outside of a render pass instance" } ] }, @@ -21741,6 +22361,10 @@ "text": " The subresource.arrayLayer member of each element of pBinds must be less than the arrayLayers specified in VkImageCreateInfo when image was created" }, { + "vuid": "VUID-VkSparseImageMemoryBindInfo-image-02901", + "text": " image must have been created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT set" + }, + { "vuid": "VUID-VkSparseImageMemoryBindInfo-image-parameter", "text": " image must be a valid VkImage handle" }, @@ -24262,8 +24886,116 @@ } ] }, + "VkDeferredOperationInfoKHR": { + "(VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-VkDeferredOperationInfoKHR-operationHandle-03433", + "text": " Any previous deferred operation that was associated with operationHandle must be complete" + }, + { + "vuid": "VUID-VkDeferredOperationInfoKHR-sType-sType", + "text": " sType must be 
VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR" + } + ] + }, + "vkCreateDeferredOperationKHR": { + "(VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-vkCreateDeferredOperationKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateDeferredOperationKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateDeferredOperationKHR-pDeferredOperation-parameter", + "text": " pDeferredOperation must be a valid pointer to a VkDeferredOperationKHR handle" + } + ] + }, + "vkDeferredOperationJoinKHR": { + "(VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-vkDeferredOperationJoinKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDeferredOperationJoinKHR-operation-parameter", + "text": " operation must be a valid VkDeferredOperationKHR handle" + }, + { + "vuid": "VUID-vkDeferredOperationJoinKHR-operation-parent", + "text": " operation must have been created, allocated, or retrieved from device" + } + ] + }, + "vkDestroyDeferredOperationKHR": { + "(VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-vkDestroyDeferredOperationKHR-operation-03434", + "text": " If VkAllocationCallbacks were provided when operation was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyDeferredOperationKHR-operation-03435", + "text": " If no VkAllocationCallbacks were provided when operation was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyDeferredOperationKHR-operation-03436", + "text": " operation must be completed" + }, + { + "vuid": "VUID-vkDestroyDeferredOperationKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyDeferredOperationKHR-operation-parameter", + "text": " operation must be a valid VkDeferredOperationKHR handle" + }, + { + "vuid": "VUID-vkDestroyDeferredOperationKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyDeferredOperationKHR-operation-parent", + "text": " operation must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetDeferredOperationMaxConcurrencyKHR": { + "(VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-vkGetDeferredOperationMaxConcurrencyKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeferredOperationMaxConcurrencyKHR-operation-parameter", + "text": " operation must be a valid VkDeferredOperationKHR handle" + }, + { + "vuid": "VUID-vkGetDeferredOperationMaxConcurrencyKHR-operation-parent", + "text": " operation must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetDeferredOperationResultKHR": { + "(VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-vkGetDeferredOperationResultKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeferredOperationResultKHR-operation-parameter", + "text": " operation must be a valid VkDeferredOperationKHR handle" + }, + { + "vuid": "VUID-vkGetDeferredOperationResultKHR-operation-parent", + "text": " operation must have been created, allocated, or retrieved from device" + } + ] + }, "vkCmdTraceRaysNV": { - "(VK_NV_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { 
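
(Editorial aside, not part of the patch: a minimal single-threaded C sketch of driving a VkDeferredOperationKHR to completion, matching the join/result/destroy rules listed above. The function name finishDeferredOperation is an assumption, op is assumed to have been created with vkCreateDeferredOperationKHR and handed to some deferrable command, and these entry points sit behind VK_ENABLE_BETA_EXTENSIONS at this header version.)

// Join a deferred host operation until it completes, then clean up
// (illustrative sketch only).
VkResult finishDeferredOperation(VkDevice device, VkDeferredOperationKHR op)
{
    uint32_t maxThreads = vkGetDeferredOperationMaxConcurrencyKHR(device, op);
    (void)maxThreads; // hint for how many worker threads could usefully join

    VkResult join;
    do {
        join = vkDeferredOperationJoinKHR(device, op);
    } while (join == VK_THREAD_IDLE_KHR); // no work right now; retry (or yield)

    if (join != VK_SUCCESS)
        // VK_THREAD_DONE_KHR: other joined threads will finish the work,
        // so this thread should not destroy the operation yet; or an error.
        return join;

    // The operation is complete: fetch the deferred command's result, then
    // destroy the handle (it must be complete before destruction, VUID 03436).
    VkResult result = vkGetDeferredOperationResultKHR(device, op);
    vkDestroyDeferredOperationKHR(device, op, NULL);
    return result;
}
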
"vuid": "VUID-vkCmdTraceRaysNV-None-02690", "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" @@ -24322,7 +25054,7 @@ }, { "vuid": "VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02456", - "text": " raygenShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment" + "text": " raygenShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" }, { "vuid": "VUID-vkCmdTraceRaysNV-missShaderBindingOffset-02457", @@ -24330,7 +25062,7 @@ }, { "vuid": "VUID-vkCmdTraceRaysNV-missShaderBindingOffset-02458", - "text": " missShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment" + "text": " missShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" }, { "vuid": "VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02459", @@ -24338,7 +25070,7 @@ }, { "vuid": "VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02460", - "text": " hitShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment" + "text": " hitShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" }, { "vuid": "VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02461", @@ -24346,31 +25078,39 @@ }, { "vuid": "VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02462", - "text": " callableShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment" + "text": " callableShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" }, { "vuid": "VUID-vkCmdTraceRaysNV-missShaderBindingStride-02463", - "text": " missShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize" + "text": " missShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" }, { "vuid": "VUID-vkCmdTraceRaysNV-hitShaderBindingStride-02464", - "text": " hitShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize" + "text": " hitShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" }, { "vuid": "VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02465", - "text": " callableShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize" + "text": " callableShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" }, { "vuid": "VUID-vkCmdTraceRaysNV-missShaderBindingStride-02466", - "text": " missShaderBindingStride must be a less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxShaderGroupStride" + "text": " missShaderBindingStride must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" }, { "vuid": "VUID-vkCmdTraceRaysNV-hitShaderBindingStride-02467", - "text": " hitShaderBindingStride must be a less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxShaderGroupStride" + "text": " hitShaderBindingStride must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" }, { "vuid": "VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02468", - "text": " callableShaderBindingStride must be a less than or 
equal to VkPhysicalDeviceRayTracingPropertiesNV::maxShaderGroupStride" + "text": " callableShaderBindingStride must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-None-03429", + "text": " Any shader group handle referenced by this call must have been queried from the currently bound ray tracing shader pipeline" + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-maxRecursionDepth-03430", + "text": " This command must not cause a shader call instruction to be executed from a shader invocation with a recursion depth greater than the value of maxRecursionDepth used to create the bound ray tracing pipeline." }, { "vuid": "VUID-vkCmdTraceRaysNV-width-02469", @@ -24421,19 +25161,19 @@ "text": " Each of callableShaderBindingTableBuffer, commandBuffer, hitShaderBindingTableBuffer, missShaderBindingTableBuffer, and raygenShaderBindingTableBuffer that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" } ], - "(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ { "vuid": "VUID-vkCmdTraceRaysNV-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" } ], - "(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ { "vuid": "VUID-vkCmdTraceRaysNV-None-02693", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" } ], - "(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ { "vuid": "VUID-vkCmdTraceRaysNV-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -24443,13 +25183,13 @@ "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" } ], - "(VK_NV_ray_tracing)+(VK_NV_corner_sampled_image)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)+(VK_NV_corner_sampled_image)": [ { "vuid": "VUID-vkCmdTraceRaysNV-flags-02696", "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE." 
} ], - "(VK_NV_ray_tracing)+(VK_VERSION_1_1)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)+(VK_VERSION_1_1)": [ { "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-02707", "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" @@ -24464,171 +25204,1523 @@ } ] }, - "vkCmdBuildAccelerationStructureNV": { - "(VK_NV_ray_tracing)": [ + "vkCmdTraceRaysKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241", - "text": " geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount" + "vuid": "VUID-vkCmdTraceRaysKHR-None-02690", + "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-dst-02488", - "text": " dst must have been created with compatible VkAccelerationStructureInfoNV where VkAccelerationStructureInfoNV::type and VkAccelerationStructureInfoNV::flags are identical, VkAccelerationStructureInfoNV::instanceCount and VkAccelerationStructureInfoNV::geometryCount for dst are greater than or equal to the build size and each geometry in VkAccelerationStructureInfoNV::pGeometries for dst has greater than or equal to the number of vertices, indices, and AABBs." + "vuid": "VUID-vkCmdTraceRaysKHR-None-02691", + "text": " If a VkImageView is accessed using atomic operations as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02489", - "text": " If update is VK_TRUE, src must not be VK_NULL_HANDLE" + "vuid": "VUID-vkCmdTraceRaysKHR-None-02697", + "text": " For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02490", - "text": " If update is VK_TRUE, src must have been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV set in VkAccelerationStructureInfoNV::flags" + "vuid": "VUID-vkCmdTraceRaysKHR-None-02698", + "text": " For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02491", - "text": " If update is VK_FALSE, The size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and VkAccelerationStructureMemoryRequirementsInfoNV::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV must be less than or equal to the size of scratch minus scratchOffset" + "vuid": 
"VUID-vkCmdTraceRaysKHR-None-02699", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02492", - "text": " If update is VK_TRUE, The size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and VkAccelerationStructureMemoryRequirementsInfoNV::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV must be less than or equal to the size of scratch minus scratchOffset" + "vuid": "VUID-vkCmdTraceRaysKHR-None-02700", + "text": " A valid pipeline must be bound to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-02701", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02859", + "text": " There must not have been any calls to dynamic state setting commands for any state not specified as dynamic in the VkPipeline object bound to the pipeline bind point used by this command, since that pipeline was bound" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02702", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02703", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02704", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02705", + "text": " If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02706", + "text": " If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": 
"VUID-vkCmdTraceRaysKHR-raygenShaderBindingOffset-02455", + "text": " raygenShaderBindingOffset must be less than the size of raygenShaderBindingTableBuffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-raygenShaderBindingOffset-02456", + "text": " raygenShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-missShaderBindingOffset-02457", + "text": " missShaderBindingOffset must be less than the size of missShaderBindingTableBuffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-missShaderBindingOffset-02458", + "text": " missShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-hitShaderBindingOffset-02459", + "text": " hitShaderBindingOffset must be less than the size of hitShaderBindingTableBuffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-hitShaderBindingOffset-02460", + "text": " hitShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-callableShaderBindingOffset-02461", + "text": " callableShaderBindingOffset must be less than the size of callableShaderBindingTableBuffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-callableShaderBindingOffset-02462", + "text": " callableShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-missShaderBindingStride-02463", + "text": " missShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-hitShaderBindingStride-02464", + "text": " hitShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-callableShaderBindingStride-02465", + "text": " callableShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-missShaderBindingStride-02466", + "text": " missShaderBindingStride must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-hitShaderBindingStride-02467", + "text": " hitShaderBindingStride must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-callableShaderBindingStride-02468", + "text": " callableShaderBindingStride must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-03429", + "text": " Any shader group handle referenced by this call must have been queried from the currently bound ray tracing shader pipeline" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-maxRecursionDepth-03430", + "text": " This command must not cause a shader call instruction to be executed from a shader invocation with a recursion depth greater than the value of maxRecursionDepth used to create the bound ray tracing pipeline." 
+ }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-width-03505", + "text": " width must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0]" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-height-03506", + "text": " height must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1]" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-depth-03507", + "text": " depth must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2]" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-flags-03508", + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, the buffer member of hitShaderBindingTable must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-flags-03509", + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, the buffer member of hitShaderBindingTable must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-flags-03510", + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, the buffer member of hitShaderBindingTable must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-flags-03511", + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, the shader group handle identified by missShaderBindingTable must contain a valid miss shader" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-flags-03512", + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, entries in hitShaderBindingTable accessed as a result of this command in order to execute an any hit shader must not be set to zero" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-flags-03513", + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, entries in hitShaderBindingTable accessed as a result of this command in order to execute a closest hit shader must not be set to zero" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-flags-03514", + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, entries in hitShaderBindingTable accessed as a result of this command in order to execute an intersection shader must not be set to zero" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-pRaygenShaderBindingTable-parameter", + "text": " pRaygenShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-parameter", + "text": " pMissShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-parameter", + "text": " pHitShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-parameter", + "text": " pCallableShaderBindingTable must be a valid pointer to a 
valid VkStridedBufferRegionKHR structure" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE." 
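
(Editorial aside, not part of the patch: a C sketch of filling VkStridedBufferRegionKHR binding-table regions and issuing vkCmdTraceRaysKHR under the provisional VK_KHR_ray_tracing interface of this header version. The function name traceFullScreen, the layout of shader group handles in sbtBuffer, and the one-group-per-region arrangement are assumptions; rtProps is the queried VkPhysicalDeviceRayTracingPropertiesKHR.)

// Trace a width x height grid of rays with single-entry binding tables
// (illustrative sketch only, VK_ENABLE_BETA_EXTENSIONS required).
void traceFullScreen(VkCommandBuffer cmd,
                     VkPipeline rayPipeline,
                     VkBuffer sbtBuffer,
                     const VkPhysicalDeviceRayTracingPropertiesKHR* rtProps,
                     uint32_t width, uint32_t height)
{
    // Offsets are kept at multiples of shaderGroupBaseAlignment and strides at
    // multiples of shaderGroupHandleSize, per VUIDs 02456-02465 above.
    const VkDeviceSize base   = rtProps->shaderGroupBaseAlignment;
    const VkDeviceSize stride = rtProps->shaderGroupHandleSize;

    VkStridedBufferRegionKHR raygen   = { sbtBuffer, 0 * base, stride, stride };
    VkStridedBufferRegionKHR miss     = { sbtBuffer, 1 * base, stride, stride };
    VkStridedBufferRegionKHR hit      = { sbtBuffer, 2 * base, stride, stride };
    VkStridedBufferRegionKHR callable = { VK_NULL_HANDLE, 0, 0, 0 };

    // A ray tracing pipeline must be bound; the shader group handles written
    // into sbtBuffer must have been queried from this pipeline (VUID 03429).
    vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, rayPipeline);
    vkCmdTraceRaysKHR(cmd, &raygen, &miss, &hit, &callable,
                      width, height, 1 /* depth */);
}
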
+ } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-02712", + "text": " If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-02713", + "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" + } + ] + }, + "VkStridedBufferRegionKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkStridedBufferRegionKHR-buffer-03515", + "text": " If buffer is not VK_NULL_HANDLE, size plus offset must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-VkStridedBufferRegionKHR-buffer-03516", + "text": " If buffer is not VK_NULL_HANDLE, stride must be less than the size of buffer" + }, + { + "vuid": "VUID-VkStridedBufferRegionKHR-buffer-parameter", + "text": " If buffer is not VK_NULL_HANDLE, buffer must be a valid VkBuffer handle" + } + ] + }, + "vkCmdTraceRaysIndirectKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02690", + "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02691", + "text": " If a VkImageView is accessed using atomic operations as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02697", + "text": " For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02698", + "text": " For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02699", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02700", + "text": " A valid pipeline must be bound to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-02701", + "text": " If the VkPipeline object bound to the pipeline bind 
point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02859", + "text": " There must not have been any calls to dynamic state setting commands for any state not specified as dynamic in the VkPipeline object bound to the pipeline bind point used by this command, since that pipeline was bound" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02702", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02703", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02704", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02705", + "text": " If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02706", + "text": " If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-raygenShaderBindingOffset-02455", + "text": " raygenShaderBindingOffset must be less than the size of raygenShaderBindingTableBuffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-raygenShaderBindingOffset-02456", + "text": " raygenShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-missShaderBindingOffset-02457", + "text": " missShaderBindingOffset must be less than the size of missShaderBindingTableBuffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-missShaderBindingOffset-02458", + "text": " missShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-hitShaderBindingOffset-02459", + "text": " hitShaderBindingOffset must be less than the size of hitShaderBindingTableBuffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-hitShaderBindingOffset-02460", + "text": " 
hitShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-callableShaderBindingOffset-02461", + "text": " callableShaderBindingOffset must be less than the size of callableShaderBindingTableBuffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-callableShaderBindingOffset-02462", + "text": " callableShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-missShaderBindingStride-02463", + "text": " missShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-hitShaderBindingStride-02464", + "text": " hitShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-callableShaderBindingStride-02465", + "text": " callableShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-missShaderBindingStride-02466", + "text": " missShaderBindingStride must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-hitShaderBindingStride-02467", + "text": " hitShaderBindingStride must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-callableShaderBindingStride-02468", + "text": " callableShaderBindingStride must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-03429", + "text": " Any shader group handle referenced by this call must have been queried from the currently bound ray tracing shader pipeline" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-maxRecursionDepth-03430", + "text": " This command must not cause a shader call instruction to be executed from a shader invocation with a recursion depth greater than the value of maxRecursionDepth used to create the bound ray tracing pipeline." 
+ }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-buffer-02708", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-buffer-02709", + "text": " buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-02710", + "text": " offset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-03517", + "text": " (offset + sizeof(VkTraceRaysIndirectCommandKHR)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-rayTracingIndirectTraceRays-03518", + "text": " the VkPhysicalDeviceRayTracingFeaturesKHRrayTracingIndirectTraceRays feature must be enabled" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pRaygenShaderBindingTable-parameter", + "text": " pRaygenShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-parameter", + "text": " pMissShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-parameter", + "text": " pHitShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-parameter", + "text": " pCallableShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commonparent", + "text": " Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-filterCubic-02694", + "text": " Any VkImageView being sampled 
with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE." + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-02711", + "text": " commandBuffer must not be a protected command buffer" + } + ] + }, + "VkTraceRaysIndirectCommandKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkTraceRaysIndirectCommandKHR-width-03519", + "text": " width must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0]" + }, + { + "vuid": "VUID-VkTraceRaysIndirectCommandKHR-height-03520", + "text": " height must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1]" + }, + { + "vuid": "VUID-VkTraceRaysIndirectCommandKHR-depth-03521", + "text": " depth must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2]" + } + ] + }, + "vkCmdBuildAccelerationStructureNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241", + "text": " geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-dst-02488", + "text": " dst must have been created with compatible VkAccelerationStructureInfoNV where VkAccelerationStructureInfoNV::type and VkAccelerationStructureInfoNV::flags are identical, VkAccelerationStructureInfoNV::instanceCount and VkAccelerationStructureInfoNV::geometryCount for dst are greater than or equal to the build size and each geometry in VkAccelerationStructureInfoNV::pGeometries for dst has greater than or equal to the number of vertices, indices, and AABBs." 
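The vkCmdTraceRaysIndirectKHR and VkTraceRaysIndirectCommandKHR entries above combine as follows: the dimensions are written into an indirect buffer created with VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, at an offset that is a multiple of 4 and leaves room for the whole structure, and they remain bounded by maxComputeWorkGroupCount. A minimal sketch, assuming the rayTracingIndirectTraceRays feature is enabled and reusing the hypothetical SBT regions from the earlier sketch plus cmd, indirectBuf, and a mapped pointer mappedIndirect:

// Host side: write the dispatch size into the mapped, host-visible indirect buffer.
VkTraceRaysIndirectCommandKHR trace{};
trace.width = 1920; trace.height = 1080; trace.depth = 1;             // VUIDs 03519-03521
*static_cast<VkTraceRaysIndirectCommandKHR*>(mappedIndirect) = trace;

// Recording: indirectBuf was created with VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT (VUID 02709);
// offset 0 is a multiple of 4 and the command fits inside the buffer (VUIDs 02710, 03517).
vkCmdTraceRaysIndirectKHR(cmd, &raygen, &miss, &hit, &callable, indirectBuf, 0);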
+ }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02489", + "text": " If update is VK_TRUE, src must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02490", + "text": " If update is VK_TRUE, src must have been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV set in VkAccelerationStructureInfoNV::flags" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02491", + "text": " If update is VK_FALSE, the size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and VkAccelerationStructureMemoryRequirementsInfoNV::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV must be less than or equal to the size of scratch minus scratchOffset" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02492", + "text": " If update is VK_TRUE, the size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and VkAccelerationStructureMemoryRequirementsInfoNV::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV must be less than or equal to the size of scratch minus scratchOffset" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-scratch-03522", + "text": " scratch must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-instanceData-03523", + "text": " If instanceData is not VK_NULL_HANDLE, instanceData must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-03524", + "text": " If update is VK_TRUE, then objects that were previously active must not be made inactive as per Inactive Primitives and Instances" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-03525", + "text": " If update is VK_TRUE, then objects that were previously inactive must not be made active as per Inactive Primitives and Instances" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-03526", + "text": " If update is VK_TRUE, the src and dst objects must either be the same object or not have any memory aliasing" }, { "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureInfoNV structure" + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureInfoNV structure" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter", + "text": " If instanceData is not VK_NULL_HANDLE, instanceData must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-dst-parameter", + "text": " dst must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-src-parameter", + "text": " If src is not VK_NULL_HANDLE, src must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter", + "text": " scratch must be a valid VkBuffer handle" + }, 
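A minimal sketch of how the NV scratch-sizing rules above (VUIDs 02491/02492, 03522) are usually satisfied, assuming device, cmd, a destination structure dstAs, a filled VkAccelerationStructureInfoNV named asInfo, and a scratch buffer scratchBuf created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV:

// Query the build-scratch size for the destination acceleration structure (VUID 02491).
VkAccelerationStructureMemoryRequirementsInfoNV reqInfo{};
reqInfo.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
reqInfo.type  = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
reqInfo.accelerationStructure = dstAs;
VkMemoryRequirements2KHR reqs{ VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
vkGetAccelerationStructureMemoryRequirementsNV(device, &reqInfo, &reqs);

// scratchBuf must provide at least reqs.memoryRequirements.size bytes past scratchOffset.
vkCmdBuildAccelerationStructureNV(cmd, &asInfo,
                                  VK_NULL_HANDLE, 0,   // bottom-level build: no instance data
                                  VK_FALSE,            // full build, not an update, so src may be VK_NULL_HANDLE
                                  dstAs, VK_NULL_HANDLE,
                                  scratchBuf, 0);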
+ { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commonparent", + "text": " Each of commandBuffer, dst, instanceData, scratch, and src that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdBuildAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pOffsetInfos-03402", + "text": " pOffsetInfos[i] must be a valid pointer to an array of pInfos[i]→geometryCount VkAccelerationStructureBuildOffsetInfoKHR structures" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03403", + "text": " Each pInfos[i].srcAccelerationStructure must not refer to the same acceleration structure as any pInfos[i].dstAccelerationStructure that is provided to the same build command unless it is identical for an update" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03404", + "text": " For each pInfos[i], dstAccelerationStructure must have been created with compatible VkAccelerationStructureCreateInfoKHR where VkAccelerationStructureCreateInfoKHR::type and VkAccelerationStructureCreateInfoKHR::flags are identical to VkAccelerationStructureBuildGeometryInfoKHR::type and VkAccelerationStructureBuildGeometryInfoKHR::flags respectively, VkAccelerationStructureBuildGeometryInfoKHR::geometryCount for dstAccelerationStructure are greater than or equal to the build size, and each geometry in VkAccelerationStructureBuildGeometryInfoKHR::ppGeometries for dstAccelerationStructure has greater than or equal to the number of vertices, indices, and AABBs, VkAccelerationStructureGeometryTrianglesDataKHR::transformData is both 0 or both non-zero, and all other parameters are the same." 
+ }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03405", + "text": " For each pInfos[i], if update is VK_TRUE, then objects that were previously active for that acceleration structure must not be made inactive as per Inactive Primitives and Instances" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03406", + "text": " For each pInfos[i], if update is VK_TRUE, then objects that were previously inactive for that acceleration structure must not be made active as per Inactive Primitives and Instances" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-None-03407", + "text": " Any acceleration structure instance in any top level build in this command must not reference any bottom level acceleration structure built by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03408", + "text": " There must not be any memory aliasing between the scratch memories that are provided in all the pInfos[i].scratchData memories for the acceleration structure builds" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-None-03409", + "text": " There must not be any memory aliasing between memory bound to any top level, bottom level, or instance acceleration structure accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-parameter", + "text": " pInfos must be a valid pointer to an array of infoCount valid VkAccelerationStructureBuildGeometryInfoKHR structures" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-ppOffsetInfos-parameter", + "text": " ppOffsetInfos must be a valid pointer to an array of infoCount VkAccelerationStructureBuildOffsetInfoKHR structures" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-infoCount-arraylength", + "text": " infoCount must be greater than 0" + } + ], + "core": [ + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-update-03527", + "text": " If update is VK_FALSE, all addresses between pInfos[i].scratchData and pInfos[i].scratchData + N - 1 must be in the buffer device address range of the same buffer, where N is given by the size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsKHR with VkAccelerationStructureMemoryRequirementsInfoKHR::accelerationStructure set to pInfos[i].dstAccelerationStructure and VkAccelerationStructureMemoryRequirementsInfoKHR::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-update-03528", + "text": " If update is VK_TRUE, all addresses between pInfos[i].scratchData and pInfos[i].scratchData + N - 1 must be in the buffer device address range of the same buffer, where N is given by the size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsKHR with 
VkAccelerationStructureMemoryRequirementsInfoKHR::accelerationStructure set to pInfos[i].dstAccelerationStructure and VkAccelerationStructureMemoryRequirementsInfoKHR::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03529", + "text": " The buffer from which the buffer device address pInfos[i].scratchData is queried must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR usage flag" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-None-03530", + "text": " All VkDeviceOrHostAddressKHR or VkDeviceOrHostAddressConstKHR referenced by this command must contain valid device addresses" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-None-03531", + "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to device memory" + } + ], + "(VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pNext-03532", + "text": " The VkDeferredOperationInfoKHR structure must not be included in the pNext chain of any of the provided VkAccelerationStructureBuildGeometryInfoKHR structures." + } + ] + }, + "vkCmdBuildAccelerationStructureIndirectKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-None-03533", + "text": " All VkDeviceOrHostAddressKHR or VkDeviceOrHostAddressConstKHR referenced by this command must contain valid device addresses" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-None-03534", + "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-rayTracingIndirectAccelerationStructureBuild-03535", + "text": " The VkPhysicalDeviceRayTracingFeaturesKHRrayTracingIndirectAccelerationStructureBuild feature must be enabled" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureBuildGeometryInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-indirectBuffer-parameter", + "text": " indirectBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-commonparent", + "text": " Both of commandBuffer, and indirectBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-pNext-03536", + "text": " The VkDeferredOperationInfoKHR structure must not be included in the pNext chain of any of the provided VkAccelerationStructureBuildGeometryInfoKHR structures." 
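Putting the vkCmdBuildAccelerationStructureKHR rules above together with the VkAccelerationStructureBuildGeometryInfoKHR and geometry entries that follow, a single-geometry bottom-level build might look like the sketch below, assuming vertexAddr, indexAddr, and scratchAddr were obtained with vkGetBufferDeviceAddress and that cmd, dstAs, and triangleCount exist in the application:

VkAccelerationStructureGeometryKHR geom{};
geom.sType        = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR;
geom.geometryType = VK_GEOMETRY_TYPE_TRIANGLES_KHR;
geom.flags        = VK_GEOMETRY_OPAQUE_BIT_KHR;
geom.geometry.triangles.sType        = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR;
geom.geometry.triangles.vertexFormat = VK_FORMAT_R32G32B32_SFLOAT;
geom.geometry.triangles.vertexData.deviceAddress = vertexAddr;
geom.geometry.triangles.vertexStride = 3 * sizeof(float);
geom.geometry.triangles.indexType    = VK_INDEX_TYPE_UINT32;
geom.geometry.triangles.indexData.deviceAddress = indexAddr;
const VkAccelerationStructureGeometryKHR* pGeom = &geom;

VkAccelerationStructureBuildGeometryInfoKHR build{};
build.sType  = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR;
build.type   = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR;
build.flags  = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR;
build.update = VK_FALSE;                        // full build, so srcAccelerationStructure may stay VK_NULL_HANDLE (VUID 03537)
build.dstAccelerationStructure = dstAs;
build.geometryCount = 1;
build.ppGeometries  = &pGeom;
build.scratchData.deviceAddress = scratchAddr;

// One offset/count entry per geometry in each info (VUID 03402).
VkAccelerationStructureBuildOffsetInfoKHR offsets{};
offsets.primitiveCount = triangleCount;
const VkAccelerationStructureBuildOffsetInfoKHR* pOffsets = &offsets;

vkCmdBuildAccelerationStructureKHR(cmd, 1, &build, &pOffsets);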
+ } + ] + }, + "VkAccelerationStructureBuildGeometryInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-update-03537", + "text": " If update is VK_TRUE, srcAccelerationStructure must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-update-03538", + "text": " If update is VK_TRUE, srcAccelerationStructure must have been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR set in VkAccelerationStructureBuildGeometryInfoKHR::flags" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-scratchData-03539", + "text": " scratchData must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR usage flag" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-update-03540", + "text": " If update is enam:VK_TRUE, the srcAccelerationStructure and dstAccelerationStructure objects must either be the same object or not have any memory aliasing" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkDeferredOperationInfoKHR" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-parameter", + "text": " type must be a valid VkAccelerationStructureTypeKHR value" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkBuildAccelerationStructureFlagBitsKHR values" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-srcAccelerationStructure-parameter", + "text": " If srcAccelerationStructure is not VK_NULL_HANDLE, srcAccelerationStructure must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-dstAccelerationStructure-parameter", + "text": " dstAccelerationStructure must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-scratchData-parameter", + "text": " scratchData must be a valid VkDeviceOrHostAddressKHR union" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-commonparent", + "text": " Both of dstAccelerationStructure, and srcAccelerationStructure that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkAccelerationStructureGeometryKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-geometryType-03541", + "text": " If geometryType is VK_GEOMETRY_TYPE_AABBS_KHR, the aabbs member of geometry must be a valid VkAccelerationStructureGeometryAabbsDataKHR structure" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-geometryType-03542", + "text": " If geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, the triangles member of geometry must be a valid VkAccelerationStructureGeometryTrianglesDataKHR structure" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-geometryType-03543", + "text": " If geometryType is VK_GEOMETRY_TYPE_INSTANCES_KHR, the instances member of geometry 
must be a valid VkAccelerationStructureGeometryInstancesDataKHR structure" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-geometryType-parameter", + "text": " geometryType must be a valid VkGeometryTypeKHR value" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-geometry-parameter", + "text": " geometry must be a valid VkAccelerationStructureGeometryDataKHR union" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-flags-parameter", + "text": " flags must be a valid combination of VkGeometryFlagBitsKHR values" + } + ] + }, + "VkAccelerationStructureGeometryDataKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureGeometryDataKHR-triangles-parameter", + "text": " triangles must be a valid VkAccelerationStructureGeometryTrianglesDataKHR structure" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryDataKHR-aabbs-parameter", + "text": " aabbs must be a valid VkAccelerationStructureGeometryAabbsDataKHR structure" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryDataKHR-instances-parameter", + "text": " instances must be a valid VkAccelerationStructureGeometryInstancesDataKHR structure" + } + ] + }, + "VkAccelerationStructureGeometryTrianglesDataKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexFormat-parameter", + "text": " vertexFormat must be a valid VkFormat value" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexData-parameter", + "text": " vertexData must be a valid VkDeviceOrHostAddressConstKHR union" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-indexType-parameter", + "text": " indexType must be a valid VkIndexType value" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-indexData-parameter", + "text": " If indexData is not 0, indexData must be a valid VkDeviceOrHostAddressConstKHR union" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-transformData-parameter", + "text": " If transformData is not 0, transformData must be a valid VkDeviceOrHostAddressConstKHR union" + } + ] + }, + "VkAccelerationStructureGeometryAabbsDataKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-data-03544", + "text": " data must be aligned to 8 bytes" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-stride-03545", + "text": " stride must be a multiple of 8" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-data-parameter", + "text": " data must 
be a valid VkDeviceOrHostAddressConstKHR union" + } + ] + }, + "VkAabbPositionsKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAabbPositionsKHR-minX-03546", + "text": " minX must be less than or equal to maxX" + }, + { + "vuid": "VUID-VkAabbPositionsKHR-minY-03547", + "text": " minY must be less than or equal to maxY" + }, + { + "vuid": "VUID-VkAabbPositionsKHR-minZ-03548", + "text": " minZ must be less than or equal to maxZ" + } + ] + }, + "VkAccelerationStructureGeometryInstancesDataKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-data-03549", + "text": " data must be aligned to 16 bytes" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-arrayOfPointers-03550", + "text": " If arrayOfPointers is true, each pointer must be aligned to 16 bytes" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-data-parameter", + "text": " data must be a valid VkDeviceOrHostAddressConstKHR union" + } + ] + }, + "VkAccelerationStructureInstanceKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureInstanceKHR-flags-parameter", + "text": " flags must be a valid combination of VkGeometryInstanceFlagBitsKHR values" + } + ] + }, + "VkAccelerationStructureBuildOffsetInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureBuildOffsetInfoKHR-primitiveOffset-03551", + "text": " For geometries of type VK_GEOMETRY_TYPE_TRIANGLES_KHR, if the geometry uses indices, the offset primitiveOffset from VkAccelerationStructureGeometryTrianglesDataKHR::indexData must be a multiple of the element size of VkAccelerationStructureGeometryTrianglesDataKHR::indexType" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildOffsetInfoKHR-primitiveOffset-03552", + "text": " For geometries of type VK_GEOMETRY_TYPE_TRIANGLES_KHR, if the geometry doesn’t use indices, the offset primitiveOffset from VkAccelerationStructureGeometryTrianglesDataKHR::vertexData must be a multiple of the component size of VkAccelerationStructureGeometryTrianglesDataKHR::vertexType" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildOffsetInfoKHR-transformOffset-03553", + "text": " For geometries of type VK_GEOMETRY_TYPE_TRIANGLES_KHR, the offset transformOffset from VkAccelerationStructureGeometryTrianglesDataKHR::transformData must be a multiple of 16" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildOffsetInfoKHR-primitiveOffset-03554", + "text": " For geometries of type VK_GEOMETRY_TYPE_AABBS_KHR, the offset primitiveOffset from VkAccelerationStructureGeometryAabbsDataKHR::data must be a multiple of 8" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildOffsetInfoKHR-primitiveOffset-03555", + "text": " For geometries of type VK_GEOMETRY_TYPE_INSTANCES_KHR, the offset primitiveOffset from VkAccelerationStructureGeometryInstancesDataKHR::data must be a multiple of 16 // TODO - Almost certainly should be more here" + } + ] + }, + "vkCmdWriteAccelerationStructuresPropertiesKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": 
"VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryPool-02493", + "text": " queryPool must have been created with a queryType matching queryType" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryPool-02494", + "text": " The queries identified by queryPool and firstQuery must be unavailable" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-accelerationStructures-03431", + "text": " All acceleration structures in accelerationStructures must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryType-03432", + "text": " queryType must be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR or VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-parameter", + "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureKHR handles" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryType-parameter", + "text": " queryType must be a valid VkQueryType value" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryPool-parameter", + "text": " queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-accelerationStructureCount-arraylength", + "text": " accelerationStructureCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commonparent", + "text": " Each of commandBuffer, queryPool, and the elements of pAccelerationStructures must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdCopyAccelerationStructureNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-mode-03410", + "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR or VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-src-03411", + "text": " src must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if mode is VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-dst-parameter", + "text": " dst must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-src-parameter", + "text": " src must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": 
"VUID-vkCmdCopyAccelerationStructureNV-mode-parameter", + "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter", - "text": " If instanceData is not VK_NULL_HANDLE, instanceData must be a valid VkBuffer handle" + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-renderpass", + "text": " This command must only be called outside of a render pass instance" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-dst-parameter", - "text": " dst must be a valid VkAccelerationStructureNV handle" + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commonparent", + "text": " Each of commandBuffer, dst, and src must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdCopyAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-None-03556", + "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to device memory." }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-src-parameter", - "text": " If src is not VK_NULL_HANDLE, src must be a valid VkAccelerationStructureNV handle" + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter", - "text": " scratch must be a valid VkBuffer handle" + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureInfoKHR structure" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording", + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-recording", "text": " commandBuffer must be in the recording state" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool", + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-cmdpool", "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-renderpass", + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-renderpass", "text": " This command must only be called outside of a render pass instance" - }, + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_KHR_deferred_host_operations)": [ { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commonparent", - "text": " Each of commandBuffer, dst, instanceData, scratch, and src that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-pNext-03557", + "text": " The VkDeferredOperationInfoKHR structure must not be included in the pNext chain of the VkCopyAccelerationStructureInfoKHR structure." 
} ] }, - "vkCmdWriteAccelerationStructuresPropertiesNV": { - "(VK_NV_ray_tracing)": [ + "VkCopyAccelerationStructureInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-02242", - "text": " queryType must be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV" + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-mode-03410", + "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR or VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-02493", - "text": " queryPool must have been created with a queryType matching queryType" + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-src-03411", + "text": " src must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if mode is VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-02494", - "text": " The queries identified by queryPool and firstQuery must be unavailable" + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructures-02495", - "text": " All acceleration structures in accelerationStructures must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV if queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV" + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkDeferredOperationInfoKHR" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-pAccelerationStructures-parameter", - "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureNV handles" + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-src-parameter", + "text": " src must be a valid VkAccelerationStructureKHR handle" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-parameter", - "text": " queryType must be a valid VkQueryType value" + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-dst-parameter", + "text": " dst must be a valid VkAccelerationStructureKHR handle" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-parameter", - "text": " queryPool must be a valid VkQueryPool handle" + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-mode-parameter", + "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-commonparent", + "text": " Both of dst, and src must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdCopyAccelerationStructureToMemoryKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-None-03558", + "text": " All VkDeviceOrHostAddressConstKHR referenced by this command must contain valid device addresses." 
+ }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-None-03559", + "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to device memory." + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-mode-03412", + "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-recording", + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureToMemoryInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-recording", "text": " commandBuffer must be in the recording state" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-cmdpool", + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-cmdpool", "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-renderpass", + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-renderpass", "text": " This command must only be called outside of a render pass instance" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-pNext-03560", + "text": " The VkDeferredOperationInfoKHR structure must not be included in the pNext chain of the VkCopyAccelerationStructureToMemoryInfoKHR structure." + } + ] + }, + "VkCopyAccelerationStructureToMemoryInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-dst-03561", + "text": " The memory pointed to by dst must be at least as large as the serialization size of src, as reported by VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR." 
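For the reverse direction, the vkCmdCopyMemoryToAccelerationStructureKHR and vkGetDeviceAccelerationStructureCompatibilityKHR entries that follow pair a compatibility check on the blob's leading 2*VK_UUID_SIZE bytes with a DESERIALIZE copy. A sketch assuming the provisional signature in which the compatibility query returns a VkResult, plus hypothetical device, cmd, dstAs, serializedBytes (a host copy of the blob header), and srcAddr (device address of the blob):

VkAccelerationStructureVersionKHR version{};
version.sType       = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR;
version.versionData = serializedBytes;   // first 2*VK_UUID_SIZE bytes of the serialized data
if (vkGetDeviceAccelerationStructureCompatibilityKHR(device, &version) == VK_SUCCESS) {
    VkCopyMemoryToAccelerationStructureInfoKHR copyInfo{};
    copyInfo.sType = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR;
    copyInfo.src.deviceAddress = srcAddr;   // must be a valid device address (VUID 03562)
    copyInfo.dst   = dstAs;
    copyInfo.mode  = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR;  // required mode (VUID 03413)
    vkCmdCopyMemoryToAccelerationStructureKHR(cmd, &copyInfo);
}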
}, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructureCount-arraylength", - "text": " accelerationStructureCount must be greater than 0" + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-mode-03412", + "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR" }, { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent", - "text": " Each of commandBuffer, queryPool, and the elements of pAccelerationStructures must have been created, allocated, or retrieved from the same VkDevice" + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkDeferredOperationInfoKHR" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-src-parameter", + "text": " src must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-dst-parameter", + "text": " dst must be a valid VkDeviceOrHostAddressKHR union" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-mode-parameter", + "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" } ] }, - "vkCmdCopyAccelerationStructureNV": { - "(VK_NV_ray_tracing)": [ + "vkCmdCopyMemoryToAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-mode-02496", - "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV or VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV" + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-None-03562", + "text": " All VkDeviceOrHostAddressKHR referenced by this command must contain valid device addresses." }, { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-src-02497", - "text": " src must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV if mode is VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV" + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-None-03563", + "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to device memory." 
}, { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-mode-03413", + "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR" }, { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-dst-parameter", - "text": " dst must be a valid VkAccelerationStructureNV handle" + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-pInfo-03414", + "text": " The data in pInfo->pname:src must have a format compatible with the destination physical device as returned by vkGetDeviceAccelerationStructureCompatibilityKHR" }, { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-src-parameter", - "text": " src must be a valid VkAccelerationStructureNV handle" + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-mode-parameter", - "text": " mode must be a valid VkCopyAccelerationStructureModeNV value" + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyMemoryToAccelerationStructureInfoKHR structure" }, { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording", + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-recording", "text": " commandBuffer must be in the recording state" }, { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool", + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-cmdpool", "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" }, { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-renderpass", + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-renderpass", "text": " This command must only be called outside of a render pass instance" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-pNext-03564", + "text": " The VkDeferredOperationInfoKHR structure must not be included in the pNext chain of the VkCopyMemoryToAccelerationStructureInfoKHR structure." 
+ } + ] + }, + "VkCopyMemoryToAccelerationStructureInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-mode-03413", + "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR" }, { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commonparent", - "text": " Each of commandBuffer, dst, and src must have been created, allocated, or retrieved from the same VkDevice" + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-pInfo-03414", + "text": " The data in pInfo->pname:src must have a format compatible with the destination physical device as returned by vkGetDeviceAccelerationStructureCompatibilityKHR" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkDeferredOperationInfoKHR" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-src-parameter", + "text": " src must be a valid VkDeviceOrHostAddressConstKHR union" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-dst-parameter", + "text": " dst must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-mode-parameter", + "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" + } + ] + }, + "vkGetDeviceAccelerationStructureCompatibilityKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkGetDeviceAccelerationStructureCompatibilityKHR-rayTracing-03565", + "text": " The rayTracing or rayQuery feature must be enabled" + }, + { + "vuid": "VUID-vkGetDeviceAccelerationStructureCompatibilityKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeviceAccelerationStructureCompatibilityKHR-version-parameter", + "text": " version must be a valid pointer to a valid VkAccelerationStructureVersionKHR structure" + } + ] + }, + "VkAccelerationStructureVersionKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureVersionKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureVersionKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureVersionKHR-versionData-parameter", + "text": " versionData must be a valid pointer to an array of 2*VK_UUID_SIZE uint8_t values" + } + ] + }, + "vkBuildAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-pOffsetInfos-03402", + "text": " pOffsetInfos[i] must be a valid pointer to an array of pInfos[i]→geometryCount VkAccelerationStructureBuildOffsetInfoKHR structures" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-03403", + "text": " Each pInfos[i].srcAccelerationStructure must not refer to the same acceleration structure as any pInfos[i].dstAccelerationStructure that is provided to the same build command unless it is identical for an update" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-03404", + "text": " For each pInfos[i], dstAccelerationStructure 
must have been created with compatible VkAccelerationStructureCreateInfoKHR where VkAccelerationStructureCreateInfoKHR::type and VkAccelerationStructureCreateInfoKHR::flags are identical to VkAccelerationStructureBuildGeometryInfoKHR::type and VkAccelerationStructureBuildGeometryInfoKHR::flags respectively, VkAccelerationStructureBuildGeometryInfoKHR::geometryCount for dstAccelerationStructure are greater than or equal to the build size, and each geometry in VkAccelerationStructureBuildGeometryInfoKHR::ppGeometries for dstAccelerationStructure has greater than or equal to the number of vertices, indices, and AABBs, VkAccelerationStructureGeometryTrianglesDataKHR::transformData is both 0 or both non-zero, and all other parameters are the same." + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-03405", + "text": " For each pInfos[i], if update is VK_TRUE, then objects that were previously active for that acceleration structure must not be made inactive as per Inactive Primitives and Instances" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-03406", + "text": " For each pInfos[i], if update is VK_TRUE, then objects that were previously inactive for that acceleration structure must not be made active as per Inactive Primitives and Instances" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-None-03407", + "text": " Any acceleration structure instance in any top level build in this command must not reference any bottom level acceleration structure built by this command" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-03408", + "text": " There must not be any memory aliasing between the scratch memories that are provided in all the pInfos[i].scratchData memories for the acceleration structure builds" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-None-03409", + "text": " There must not be any memory aliasing between memory bound to any top level, bottom level, or instance acceleration structure accessed by this command" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-parameter", + "text": " pInfos must be a valid pointer to an array of infoCount valid VkAccelerationStructureBuildGeometryInfoKHR structures" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-ppOffsetInfos-parameter", + "text": " ppOffsetInfos must be a valid pointer to an array of infoCount VkAccelerationStructureBuildOffsetInfoKHR structures" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-infoCount-arraylength", + "text": " infoCount must be greater than 0" + } + ], + "core": [ + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-None-03437", + "text": " All VkDeviceOrHostAddressKHR or VkDeviceOrHostAddressConstKHR referenced by this command must contain valid host addresses" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-None-03438", + "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to host-visible memory" + }, + { + "vuid": "VUID-vkBuildAccelerationStructureKHR-rayTracingHostAccelerationStructureCommands-03439", + "text": " The VkPhysicalDeviceRayTracingFeaturesKHRrayTracingHostAccelerationStructureCommands feature must be enabled" + } + ] + }, + "vkCopyAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-None-03440", + "text": " All VkAccelerationStructureKHR objects referenced by this 
command must be bound to host-visible memory." + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-rayTracingHostAccelerationStructureCommands-03441", + "text": " the VkPhysicalDeviceRayTracingFeaturesKHRrayTracingHostAccelerationStructureCommands feature must be enabled" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureInfoKHR structure" + } + ] + }, + "vkCopyMemoryToAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-None-03442", + "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to host-visible memory." + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-None-03443", + "text": " All VkDeviceOrHostAddressConstKHR referenced by this command must contain valid host pointers." + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-rayTracingHostAccelerationStructureCommands-03444", + "text": " the VkPhysicalDeviceRayTracingFeaturesKHRrayTracingHostAccelerationStructureCommands feature must be enabled" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyMemoryToAccelerationStructureInfoKHR structure" + } + ] + }, + "vkCopyAccelerationStructureToMemoryKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-None-03445", + "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to host-visible memory." + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-None-03446", + "text": " All VkDeviceOrHostAddressKHR referenced by this command must contain valid host pointers." 
+ }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-rayTracingHostAccelerationStructureCommands-03447", + "text": " the VkPhysicalDeviceRayTracingFeaturesKHRrayTracingHostAccelerationStructureCommands feature must be enabled" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureToMemoryInfoKHR structure" + } + ] + }, + "vkWriteAccelerationStructuresPropertiesKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03448", + "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, then stride must be a multiple of the size of VkDeviceSize" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03449", + "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, then data must point to a VkDeviceSize" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03450", + "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, then stride must be a multiple of the size of VkDeviceSize" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03451", + "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, then data must point to a VkDeviceSize" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-dataSize-03452", + "text": " dataSize must be greater than or equal to accelerationStructureCount*stride" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-03453", + "text": " The acceleration structures referenced by pAccelerationStructures must be bound to host-visible memory" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-accelerationStructures-03431", + "text": " All acceleration structures in accelerationStructures must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03432", + "text": " queryType must be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR or VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-parameter", + "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureKHR handles" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-parameter", + "text": " queryType must be a valid VkQueryType value" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-pData-parameter", + "text": " pData must be a valid pointer to an array of dataSize bytes" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-accelerationStructureCount-arraylength", + "text": " accelerationStructureCount must be greater than 0" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-dataSize-arraylength", + "text": " dataSize must be greater than 0" + }, + { + "vuid": 
"VUID-vkWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-parent", + "text": " Each element of pAccelerationStructures must have been created, allocated, or retrieved from device" + } + ], + "core": [ + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-rayTracingHostAccelerationStructureCommands-03454", + "text": " the VkPhysicalDeviceRayTracingFeaturesKHRrayTracingHostAccelerationStructureCommands feature must be enabled" } ] }, @@ -25180,6 +27272,42 @@ } ] }, + "VkPhysicalDeviceRayTracingFeaturesKHR": { + "(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkPhysicalDeviceRayTracingFeaturesKHR-rayTracingShaderGroupHandleCaptureReplayMixed-03348", + "text": " If rayTracingShaderGroupHandleCaptureReplayMixed is VK_TRUE, rayTracingShaderGroupHandleCaptureReplay must also be VK_TRUE" + }, + { + "vuid": "VUID-VkPhysicalDeviceRayTracingFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR" + } + ] + }, + "VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV": { + "(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV" + } + ] + }, + "VkPhysicalDeviceDiagnosticsConfigFeaturesNV": { + "(VK_NV_device_diagnostics_config)": [ + { + "vuid": "VUID-VkPhysicalDeviceDiagnosticsConfigFeaturesNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV" + } + ] + }, + "VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT": { + "(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT" + } + ] + }, "VkPhysicalDevicePushDescriptorPropertiesKHR": { "(VK_KHR_push_descriptor)": [ { @@ -25404,6 +27532,14 @@ } ] }, + "VkPhysicalDeviceRayTracingPropertiesKHR": { + "(VK_KHR_ray_tracing)": [ + { + "vuid": "VUID-VkPhysicalDeviceRayTracingPropertiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR" + } + ] + }, "VkPhysicalDeviceCooperativeMatrixPropertiesNV": { "(VK_NV_cooperative_matrix)": [ { @@ -25444,6 +27580,14 @@ } ] }, + "VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV": { + "(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV" + } + ] + }, "vkGetPhysicalDeviceMultisamplePropertiesEXT": { "(VK_EXT_sample_locations)": [ { diff --git a/registry/vk.xml b/registry/vk.xml index b7f8e45..20faf42 100644 --- a/registry/vk.xml +++ b/registry/vk.xml @@ -57,6 +57,7 @@ server. + @@ -156,7 +157,7 @@ server. 
// Vulkan 1.2 version number #define VK_API_VERSION_1_2 VK_MAKE_VERSION(1, 2, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 134 +#define VK_HEADER_VERSION 135 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION) @@ -210,7 +211,7 @@ typedef void CAMetalLayer; typedef VkFlags VkRenderPassCreateFlags; typedef VkFlags VkSamplerCreateFlags; typedef VkFlags VkPipelineLayoutCreateFlags; - typedef VkFlags VkPipelineCacheCreateFlags; + typedef VkFlags VkPipelineCacheCreateFlags; typedef VkFlags VkPipelineDepthStencilStateCreateFlags; typedef VkFlags VkPipelineDynamicStateCreateFlags; typedef VkFlags VkPipelineColorBlendStateCreateFlags; @@ -264,12 +265,14 @@ typedef void CAMetalLayer; typedef VkFlags VkDescriptorPoolResetFlags; typedef VkFlags VkDependencyFlags; typedef VkFlags VkSubgroupFeatureFlags; - typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; - typedef VkFlags VkObjectEntryUsageFlagsNVX; - typedef VkFlags VkGeometryFlagsNV; - typedef VkFlags VkGeometryInstanceFlagsNV; - typedef VkFlags VkBuildAccelerationStructureFlagsNV; - + typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV; + typedef VkFlags VkIndirectStateFlagsNV; + typedef VkFlags VkGeometryFlagsKHR; + + typedef VkFlags VkGeometryInstanceFlagsKHR; + + typedef VkFlags VkBuildAccelerationStructureFlagsKHR; + typedef VkFlags VkDescriptorUpdateTemplateCreateFlags; typedef VkFlags VkPipelineCreationFeedbackFlagsEXT; @@ -279,6 +282,7 @@ typedef void CAMetalLayer; typedef VkFlags VkPipelineCompilerControlFlagsAMD; typedef VkFlags VkShaderCorePropertiesFlagsAMD; + typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; WSI extensions typedef VkFlags VkCompositeAlphaFlagsKHR; @@ -374,15 +378,16 @@ typedef void CAMetalLayer; VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) WSI extensions VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) @@ -483,10 +488,9 @@ typedef void CAMetalLayer; Extensions - - - - + + + @@ -508,17 +512,27 @@ typedef void CAMetalLayer; - - - - - - - - + + + + + + + + + + + + + + + + + + @@ -1897,105 +1911,111 @@ typedef void CAMetalLayer; const VkDeviceMemory* pReleaseSyncs const uint64_t* pReleaseKeys - - VkStructureType sType - const void* pNext - VkBool32 computeBindingPointSupport - - - VkStructureType sType - const void* pNext - uint32_t maxIndirectCommandsLayoutTokenCount - uint32_t maxObjectEntryCounts - uint32_t minSequenceCountBufferOffsetAlignment - uint32_t minSequenceIndexBufferOffsetAlignment - uint32_t minCommandsTokenBufferOffsetAlignment - - - VkIndirectCommandsTokenTypeNVX tokenType - VkBuffer bufferbuffer containing tableEntries and additional data for indirectCommands - VkDeviceSize offsetoffset from the base address of the buffer - - - VkIndirectCommandsTokenTypeNVX tokenType - uint32_t 
bindingUnitBinding unit for vertex attribute / descriptor set, offset for pushconstants - uint32_t dynamicCountNumber of variable dynamic values for descriptor set / push constants - uint32_t divisorRate the which the array is advanced per element (must be power of 2, minimum 1) - - - VkStructureType sType - const void* pNext - VkPipelineBindPoint pipelineBindPoint - VkIndirectCommandsLayoutUsageFlagsNVX flags - uint32_t tokenCount - const VkIndirectCommandsLayoutTokenNVX* pTokens - - - VkStructureType sType - const void* pNext - VkObjectTableNVX objectTable - VkIndirectCommandsLayoutNVX indirectCommandsLayout - uint32_t indirectCommandsTokenCount - const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens - uint32_t maxSequencesCount - VkCommandBuffer targetCommandBuffer - VkBuffer sequencesCountBuffer - VkDeviceSize sequencesCountOffset - VkBuffer sequencesIndexBuffer - VkDeviceSize sequencesIndexOffset - - - VkStructureType sType - const void* pNext - VkObjectTableNVX objectTable - VkIndirectCommandsLayoutNVX indirectCommandsLayout - uint32_t maxSequencesCount + + VkStructureTypesType + void* pNext + VkBool32 deviceGeneratedCommands - - VkStructureType sType - const void* pNext - uint32_t objectCount - const VkObjectEntryTypeNVX* pObjectEntryTypes - const uint32_t* pObjectEntryCounts - const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags - - uint32_t maxUniformBuffersPerDescriptor - uint32_t maxStorageBuffersPerDescriptor - uint32_t maxStorageImagesPerDescriptor - uint32_t maxSampledImagesPerDescriptor - uint32_t maxPipelineLayouts - - - VkObjectEntryTypeNVX type - VkObjectEntryUsageFlagsNVX flags - - - VkObjectEntryTypeNVX type - VkObjectEntryUsageFlagsNVX flags - VkPipeline pipeline - - - VkObjectEntryTypeNVX type - VkObjectEntryUsageFlagsNVX flags - VkPipelineLayout pipelineLayout - VkDescriptorSet descriptorSet - - - VkObjectEntryTypeNVX type - VkObjectEntryUsageFlagsNVX flags - VkBuffer buffer - - - VkObjectEntryTypeNVX type - VkObjectEntryUsageFlagsNVX flags - VkBuffer buffer - VkIndexType indexType - - - VkObjectEntryTypeNVX type - VkObjectEntryUsageFlagsNVX flags - VkPipelineLayout pipelineLayout - VkShaderStageFlags stageFlags + + VkStructureType sType + const void* pNext + uint32_t maxGraphicsShaderGroupCount + uint32_t maxIndirectSequenceCount + uint32_t maxIndirectCommandsTokenCount + uint32_t maxIndirectCommandsStreamCount + uint32_t maxIndirectCommandsTokenOffset + uint32_t maxIndirectCommandsStreamStride + uint32_t minSequencesCountBufferOffsetAlignment + uint32_t minSequencesIndexBufferOffsetAlignment + uint32_t minIndirectCommandsBufferOffsetAlignment + + + VkStructureType sType + const void* pNext + uint32_t stageCount + const VkPipelineShaderStageCreateInfo* pStages + const VkPipelineVertexInputStateCreateInfo* pVertexInputState + const VkPipelineTessellationStateCreateInfo* pTessellationState + + + VkStructureType sType + const void* pNext + uint32_t groupCount + const VkGraphicsShaderGroupCreateInfoNV* pGroups + uint32_t pipelineCount + const VkPipeline* pPipelines + + + uint32_t groupIndex + + + VkDeviceAddress bufferAddress + uint32_t size + VkIndexType indexType + + + VkDeviceAddress bufferAddress + uint32_t size + uint32_t stride + + + uint32_t data + + + VkBuffer buffer + VkDeviceSize offset + + + VkStructureType sType + const void* pNext + VkIndirectCommandsTokenTypeNV tokenType + uint32_t stream + uint32_t offset + uint32_t vertexBindingUnit + VkBool32 vertexDynamicStride + VkPipelineLayout pushconstantPipelineLayout + VkShaderStageFlags 
pushconstantShaderStageFlags + uint32_t pushconstantOffset + uint32_t pushconstantSize + VkIndirectStateFlagsNV indirectStateFlags + uint32_t indexTypeCount + const VkIndexType* pIndexTypes + const uint32_t* pIndexTypeValues + + + VkStructureType sType + const void* pNext + VkIndirectCommandsLayoutUsageFlagsNV flags + VkPipelineBindPoint pipelineBindPoint + uint32_t tokenCount + const VkIndirectCommandsLayoutTokenNV* pTokens + uint32_t streamCount + const uint32_t* pStreamStrides + + + VkStructureType sType + const void* pNext + VkPipelineBindPoint pipelineBindPoint + VkPipeline pipeline + VkIndirectCommandsLayoutNV indirectCommandsLayout + uint32_t streamCount + const VkIndirectCommandsStreamNV* pStreams + uint32_t sequencesCount + VkBuffer preprocessBuffer + VkDeviceSize preprocessOffset + VkDeviceSize preprocessSize + VkBuffer sequencesCountBuffer + VkDeviceSize sequencesCountOffset + VkBuffer sequencesIndexBuffer + VkDeviceSize sequencesIndexOffset + + + VkStructureType sType + const void* pNext + VkPipelineBindPoint pipelineBindPoint + VkPipeline pipeline + VkIndirectCommandsLayoutNV indirectCommandsLayout + uint32_t maxSequencesCount VkStructureType sType @@ -3704,11 +3724,21 @@ typedef void CAMetalLayer; VkStructureType sType const void* pNext - VkRayTracingShaderGroupTypeNV type + VkRayTracingShaderGroupTypeKHR type + uint32_t generalShader + uint32_t closestHitShader + uint32_t anyHitShader + uint32_t intersectionShader + + + VkStructureType sType + const void* pNext + VkRayTracingShaderGroupTypeKHR type uint32_t generalShader uint32_t closestHitShader uint32_t anyHitShader uint32_t intersectionShader + const void* pShaderGroupCaptureReplayHandle VkStructureType sType @@ -3723,6 +3753,21 @@ typedef void CAMetalLayer; VkPipeline basePipelineHandleIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is nonzero, it specifies the handle of the base pipeline this is a derivative of int32_t basePipelineIndexIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is not -1, it specifies an index into pCreateInfos of the base pipeline this is a derivative of + + VkStructureType sType + const void* pNext + VkPipelineCreateFlags flagsPipeline creation flags + uint32_t stageCount + const VkPipelineShaderStageCreateInfo* pStagesOne entry for each active shader stage + uint32_t groupCount + const VkRayTracingShaderGroupCreateInfoKHR* pGroups + uint32_t maxRecursionDepth + VkPipelineLibraryCreateInfoKHR libraries + const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface + VkPipelineLayout layoutInterface layout of the pipeline + VkPipeline basePipelineHandleIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is nonzero, it specifies the handle of the base pipeline this is a derivative of + int32_t basePipelineIndexIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is not -1, it specifies an index into pCreateInfos of the base pipeline this is a derivative of + VkStructureType sType const void* pNext @@ -3752,16 +3797,16 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext - VkGeometryTypeNV geometryType - VkGeometryDataNV geometry - VkGeometryFlagsNV flags + const void* pNext + VkGeometryTypeKHR geometryType + VkGeometryDataNV geometry + VkGeometryFlagsKHR flags VkStructureType sType const void* pNext VkAccelerationStructureTypeNV type - VkBuildAccelerationStructureFlagsNVflags + VkBuildAccelerationStructureFlagsNVflags uint32_t instanceCount uint32_t geometryCount const VkGeometryNV* pGeometries @@ -3772,26 +3817,61 @@ typedef void 
CAMetalLayer; VkDeviceSize compactedSize VkAccelerationStructureInfoNV info - - VkStructureType sType + + VkStructureType sType const void* pNext - VkAccelerationStructureNV accelerationStructure + VkAccelerationStructureKHR accelerationStructure VkDeviceMemory memory VkDeviceSize memoryOffset uint32_t deviceIndexCount const uint32_t* pDeviceIndices - - VkStructureType sType + + + VkStructureType sType const void* pNext uint32_t accelerationStructureCount - const VkAccelerationStructureNV* pAccelerationStructures + const VkAccelerationStructureKHR* pAccelerationStructures + + + + VkStructureType sType + const void* pNext + VkAccelerationStructureMemoryRequirementsTypeKHR type + VkAccelerationStructureBuildTypeKHR buildType + VkAccelerationStructureKHR accelerationStructure VkStructureType sType const void* pNext - VkAccelerationStructureMemoryRequirementsTypeNV type - VkAccelerationStructureNV accelerationStructure + VkAccelerationStructureMemoryRequirementsTypeNV type + VkAccelerationStructureNV accelerationStructure + + + VkStructureType sType + void* pNext + VkBool32 rayTracing + VkBool32 rayTracingShaderGroupHandleCaptureReplay + VkBool32 rayTracingShaderGroupHandleCaptureReplayMixed + VkBool32 rayTracingAccelerationStructureCaptureReplay + VkBool32 rayTracingIndirectTraceRays + VkBool32 rayTracingIndirectAccelerationStructureBuild + VkBool32 rayTracingHostAccelerationStructureCommands + VkBool32 rayQuery + VkBool32 rayTracingPrimitiveCulling + + + VkStructureType sType + void* pNext + uint32_t shaderGroupHandleSize + uint32_t maxRecursionDepth + uint32_t maxShaderGroupStride + uint32_t shaderGroupBaseAlignment + uint64_t maxGeometryCount + uint64_t maxInstanceCount + uint64_t maxPrimitiveCount + uint32_t maxDescriptorSetAccelerationStructures + uint32_t shaderGroupHandleCaptureReplaySize VkStructureType sType @@ -3805,6 +3885,17 @@ typedef void CAMetalLayer; uint64_t maxTriangleCount uint32_t maxDescriptorSetAccelerationStructures + + VkBuffer buffer + VkDeviceSize offset + VkDeviceSize stride + VkDeviceSize size + + + uint32_t width + uint32_t height + uint32_t depth + VkStructureType sType void* pNext @@ -4042,7 +4133,7 @@ typedef void CAMetalLayer; VkPipelineCreationFeedbackFlagsEXT flags uint64_t duration - + VkStructureType sType const void* pNext VkPipelineCreationFeedbackEXT* pPipelineCreationFeedbackOutput pipeline creation feedback. 
@@ -4161,11 +4252,12 @@ typedef void CAMetalLayer; const void* pNext void* pUserData - + VkStructureType sType const void* pNext VkQueryPoolSamplingModeINTEL performanceCountersSampling + VkStructureType sType const void* pNext @@ -4355,6 +4447,11 @@ typedef void CAMetalLayer; uint32_t lineStippleFactor uint16_t lineStipplePattern + + VkStructureType sType + void* pNext + VkBool32 pipelineCreationCacheControl + VkStructureTypesType void* pNext @@ -4516,6 +4613,158 @@ typedef void CAMetalLayer; char description[VK_MAX_DESCRIPTION_SIZE] char layer[VK_MAX_EXTENSION_NAME_SIZE] + + VkDeviceAddress deviceAddress + void* hostAddress + + + VkDeviceAddress deviceAddress + const void* hostAddress + + + VkStructureType sType + const void* pNext + VkFormat vertexFormat + VkDeviceOrHostAddressConstKHR vertexData + VkDeviceSize vertexStride + VkIndexType indexType + VkDeviceOrHostAddressConstKHR indexData + VkDeviceOrHostAddressConstKHR transformData + + + VkStructureType sType + const void* pNext + VkDeviceOrHostAddressConstKHR data + VkDeviceSize stride + + + VkStructureType sType + const void* pNext + VkBool32 arrayOfPointers + VkDeviceOrHostAddressConstKHR data + + + VkAccelerationStructureGeometryTrianglesDataKHR triangles + VkAccelerationStructureGeometryAabbsDataKHR aabbs + VkAccelerationStructureGeometryInstancesDataKHR instances + + + VkStructureType sType + const void* pNext + VkGeometryTypeKHR geometryType + VkAccelerationStructureGeometryDataKHR geometry + VkGeometryFlagsKHR flags + + + VkStructureType sType + const void* pNext + VkAccelerationStructureTypeKHR type + VkBuildAccelerationStructureFlagsKHR flags + VkBool32 update + VkAccelerationStructureKHR srcAccelerationStructure + VkAccelerationStructureKHR dstAccelerationStructure + VkBool32 geometryArrayOfPointers + uint32_t geometryCount + const VkAccelerationStructureGeometryKHR* const* ppGeometries + VkDeviceOrHostAddressKHR scratchData + + + uint32_t primitiveCount + uint32_t primitiveOffset + uint32_t firstVertex + uint32_t transformOffset + + + VkStructureType sType + const void* pNext + VkGeometryTypeKHR geometryType + uint32_t maxPrimitiveCount + VkIndexType indexType + uint32_t maxVertexCount + VkFormat vertexFormat + VkBool32 allowsTransforms + + + VkStructureType sType + const void* pNext + VkDeviceSize compactedSize + VkAccelerationStructureTypeKHR type + VkBuildAccelerationStructureFlagsKHR flags + uint32_t maxGeometryCount + const VkAccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos + VkDeviceAddress deviceAddress + + + float minX + float minY + float minZ + float maxX + float maxY + float maxZ + + + + float matrix[3][4] + + + + VkTransformMatrixKHR transform + uint32_t instanceCustomIndex:24 + uint32_t mask:8 + uint32_t instanceShaderBindingTableRecordOffset:24 + VkGeometryInstanceFlagsKHR flags:8 + uint64_t accelerationStructureReference + + + + VkStructureType sType + const void* pNext + VkAccelerationStructureKHR accelerationStructure + + + VkStructureType sType + const void* pNext + const uint8_t* versionData + + + VkStructureType sType + const void* pNext + VkAccelerationStructureKHR src + VkAccelerationStructureKHR dst + VkCopyAccelerationStructureModeKHR mode + + + VkStructureType sType + const void* pNext + VkAccelerationStructureKHR src + VkDeviceOrHostAddressKHR dst + VkCopyAccelerationStructureModeKHR mode + + + VkStructureType sType + const void* pNext + VkDeviceOrHostAddressConstKHR src + VkAccelerationStructureKHR dst + VkCopyAccelerationStructureModeKHR mode + + + VkStructureType sType + const 
void* pNext + uint32_t maxPayloadSize + uint32_t maxAttributeSize + uint32_t maxCallableSize + + + VkStructureType sType + const void* pNext + VkDeferredOperationKHR operationHandle + + + VkStructureType sType + const void* pNext + uint32_t libraryCount + const VkPipeline* pLibraries + VkStructureType sType void* pNextPointer to next structure @@ -4527,6 +4776,16 @@ typedef void CAMetalLayer; VkSurfaceTransformFlagBitsKHR transform VkRect2D renderArea + + VkStructureTypesType + void* pNext + VkBool32 diagnosticsConfig + + + VkStructureType sType + const void* pNext + VkDeviceDiagnosticsConfigFlagsNV flags + Vulkan enumerant (token) definitions @@ -4558,7 +4817,8 @@ typedef void CAMetalLayer; - + + @@ -4654,6 +4914,7 @@ typedef void CAMetalLayer; + @@ -5408,8 +5669,8 @@ typedef void CAMetalLayer; - - + + @@ -5436,6 +5697,7 @@ typedef void CAMetalLayer; + @@ -5456,32 +5718,23 @@ typedef void CAMetalLayer; - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + @@ -5709,44 +5962,51 @@ typedef void CAMetalLayer; - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -5774,6 +6034,11 @@ typedef void CAMetalLayer; + + + + + @@ -7111,63 +7376,42 @@ typedef void CAMetalLayer; HANDLE* pHandle - void vkCmdProcessCommandsNVX + void vkCmdExecuteGeneratedCommandsNV VkCommandBuffer commandBuffer - const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo + VkBool32 isPreprocessed + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo - - void vkCmdReserveSpaceForCommandsNVX + + void vkCmdPreprocessGeneratedCommandsNV VkCommandBuffer commandBuffer - const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo - - VkResult vkCreateIndirectCommandsLayoutNVX - VkDevice device - const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo - const VkAllocationCallbacks* pAllocator - VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout + + void vkCmdBindPipelineShaderGroupNV + VkCommandBuffer commandBuffer + VkPipelineBindPoint pipelineBindPoint + VkPipeline pipeline + uint32_t groupIndex - void vkDestroyIndirectCommandsLayoutNVX + void vkGetGeneratedCommandsMemoryRequirementsNV VkDevice device - VkIndirectCommandsLayoutNVX indirectCommandsLayout - const VkAllocationCallbacks* pAllocator + const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo + VkMemoryRequirements2* pMemoryRequirements - VkResult vkCreateObjectTableNVX + VkResult vkCreateIndirectCommandsLayoutNV VkDevice device - const VkObjectTableCreateInfoNVX* pCreateInfo + const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo const VkAllocationCallbacks* pAllocator - VkObjectTableNVX* pObjectTable + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout - void vkDestroyObjectTableNVX + void vkDestroyIndirectCommandsLayoutNV VkDevice device - VkObjectTableNVX objectTable + VkIndirectCommandsLayoutNV indirectCommandsLayout const VkAllocationCallbacks* pAllocator - - VkResult vkRegisterObjectsNVX - VkDevice device - VkObjectTableNVX objectTable - uint32_t objectCount - const VkObjectTableEntryNVX* const* ppObjectTableEntries - const uint32_t* pObjectIndices - - - VkResult vkUnregisterObjectsNVX - VkDevice device - VkObjectTableNVX objectTable - uint32_t objectCount - const VkObjectEntryTypeNVX* pObjectEntryTypes - const uint32_t* pObjectIndices - - - void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX - VkPhysicalDevice physicalDevice - 
VkDeviceGeneratedCommandsFeaturesNVX* pFeatures - VkDeviceGeneratedCommandsLimitsNVX* pLimits - void vkGetPhysicalDeviceFeatures2 VkPhysicalDevice physicalDevice @@ -8006,11 +8250,18 @@ typedef void CAMetalLayer; VkAccelerationStructureNV* pAccelerationStructure - void vkDestroyAccelerationStructureNV + void vkDestroyAccelerationStructureKHR VkDevice device - VkAccelerationStructureNV accelerationStructure + VkAccelerationStructureKHR accelerationStructure const VkAllocationCallbacks* pAllocator + + + void vkGetAccelerationStructureMemoryRequirementsKHR + VkDevice device + const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo + VkMemoryRequirements2* pMemoryRequirements + void vkGetAccelerationStructureMemoryRequirementsNV VkDevice device @@ -8018,27 +8269,59 @@ typedef void CAMetalLayer; VkMemoryRequirements2KHR* pMemoryRequirements - VkResult vkBindAccelerationStructureMemoryNV + VkResult vkBindAccelerationStructureMemoryKHR VkDevice device uint32_t bindInfoCount - const VkBindAccelerationStructureMemoryInfoNV* pBindInfos + const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos + void vkCmdCopyAccelerationStructureNV VkCommandBuffer commandBuffer - VkAccelerationStructureNV dst - VkAccelerationStructureNV src - VkCopyAccelerationStructureModeNV mode + VkAccelerationStructureKHR dst + VkAccelerationStructureKHR src + VkCopyAccelerationStructureModeKHR mode + + + void vkCmdCopyAccelerationStructureKHR + VkCommandBuffer commandBuffer + const VkCopyAccelerationStructureInfoKHR* pInfo + + + VkResult vkCopyAccelerationStructureKHR + VkDevice device + const VkCopyAccelerationStructureInfoKHR* pInfo + + + void vkCmdCopyAccelerationStructureToMemoryKHR + VkCommandBuffer commandBuffer + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo + + + VkResult vkCopyAccelerationStructureToMemoryKHR + VkDevice device + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo + + + void vkCmdCopyMemoryToAccelerationStructureKHR + VkCommandBuffer commandBuffer + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo + + + VkResult vkCopyMemoryToAccelerationStructureKHR + VkDevice device + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo - void vkCmdWriteAccelerationStructuresPropertiesNV + void vkCmdWriteAccelerationStructuresPropertiesKHR VkCommandBuffer commandBuffer uint32_t accelerationStructureCount - const VkAccelerationStructureNV* pAccelerationStructures + const VkAccelerationStructureKHR* pAccelerationStructures VkQueryType queryType VkQueryPool queryPool uint32_t firstQuery + void vkCmdBuildAccelerationStructureNV VkCommandBuffer commandBuffer @@ -8046,11 +8329,32 @@ typedef void CAMetalLayer; VkBuffer instanceData VkDeviceSize instanceOffset VkBool32 update - VkAccelerationStructureNV dst - VkAccelerationStructureNV src + VkAccelerationStructureKHR dst + VkAccelerationStructureKHR src VkBuffer scratch VkDeviceSize scratchOffset + + VkResult vkWriteAccelerationStructuresPropertiesKHR + VkDevice device + uint32_t accelerationStructureCount + const VkAccelerationStructureKHR* pAccelerationStructures + VkQueryType queryType + size_t dataSize + void* pData + size_t stride + + + void vkCmdTraceRaysKHR + VkCommandBuffer commandBuffer + const VkStridedBufferRegionKHR* pRaygenShaderBindingTable + const VkStridedBufferRegionKHR* pMissShaderBindingTable + const VkStridedBufferRegionKHR* pHitShaderBindingTable + const VkStridedBufferRegionKHR* pCallableShaderBindingTable + uint32_t width + uint32_t height + uint32_t depth + void vkCmdTraceRaysNV VkCommandBuffer commandBuffer @@ -8070,7 
+8374,17 @@ typedef void CAMetalLayer; uint32_t depth - VkResult vkGetRayTracingShaderGroupHandlesNV + VkResult vkGetRayTracingShaderGroupHandlesKHR + VkDevice device + VkPipeline pipeline + uint32_t firstGroup + uint32_t groupCount + size_t dataSize + void* pData + + + + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR VkDevice device VkPipeline pipeline uint32_t firstGroup @@ -8081,7 +8395,7 @@ typedef void CAMetalLayer; VkResult vkGetAccelerationStructureHandleNV VkDevice device - VkAccelerationStructureNV accelerationStructure + VkAccelerationStructureKHR accelerationStructure size_t dataSize void* pData @@ -8094,12 +8408,36 @@ typedef void CAMetalLayer; const VkAllocationCallbacks* pAllocator VkPipeline* pPipelines + + VkResult vkCreateRayTracingPipelinesKHR + VkDevice device + VkPipelineCache pipelineCache + uint32_t createInfoCount + const VkRayTracingPipelineCreateInfoKHR* pCreateInfos + const VkAllocationCallbacks* pAllocator + VkPipeline* pPipelines + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV VkPhysicalDevice physicalDevice uint32_t* pPropertyCount VkCooperativeMatrixPropertiesNV* pProperties + + void vkCmdTraceRaysIndirectKHR + VkCommandBuffer commandBuffer + const VkStridedBufferRegionKHR* pRaygenShaderBindingTable + const VkStridedBufferRegionKHR* pMissShaderBindingTable + const VkStridedBufferRegionKHR* pHitShaderBindingTable + const VkStridedBufferRegionKHR* pCallableShaderBindingTable + VkBuffer buffer + VkDeviceSize offset + + + VkResult vkGetDeviceAccelerationStructureCompatibilityKHR + VkDevice device + const VkAccelerationStructureVersionKHR* version + uint32_t vkGetImageViewHandleNVX VkDevice device @@ -8268,6 +8606,67 @@ typedef void CAMetalLayer; uint32_t* pToolCount VkPhysicalDeviceToolPropertiesEXT* pToolProperties + + VkResult vkCreateAccelerationStructureKHR + VkDevice device + const VkAccelerationStructureCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkAccelerationStructureKHR* pAccelerationStructure + + + void vkCmdBuildAccelerationStructureKHR + VkCommandBuffer commandBuffer + uint32_t infoCount + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos + const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos + + + void vkCmdBuildAccelerationStructureIndirectKHR + VkCommandBuffer commandBuffer + const VkAccelerationStructureBuildGeometryInfoKHR* pInfo + VkBuffer indirectBuffer + VkDeviceSize indirectOffset + uint32_t indirectStride + + + VkResult vkBuildAccelerationStructureKHR + VkDevice device + uint32_t infoCount + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos + const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos + + + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR + VkDevice device + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo + + + VkResult vkCreateDeferredOperationKHR + VkDevice device + const VkAllocationCallbacks* pAllocator + VkDeferredOperationKHR* pDeferredOperation + + + void vkDestroyDeferredOperationKHR + VkDevice device + VkDeferredOperationKHR operation + const VkAllocationCallbacks* pAllocator + + + uint32_t vkGetDeferredOperationMaxConcurrencyKHR + VkDevice device + VkDeferredOperationKHR operation + + + VkResult vkGetDeferredOperationResultKHR + VkDevice device + VkDeferredOperationKHR operation + + + VkResult vkDeferredOperationJoinKHR + VkDevice device + VkDeferredOperationKHR operation + @@ -10053,52 +10452,10 @@ typedef void CAMetalLayer; - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - @@ -10858,16 +11215,123 @@ typedef void CAMetalLayer; - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -11127,63 +11591,89 @@ typedef void CAMetalLayer; - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - + + + + + + + + + + + + - + + + + + + + + + + + + - + + + + + + + + @@ -11628,7 +12118,7 @@ typedef void CAMetalLayer; - + @@ -11647,6 +12137,7 @@ typedef void CAMetalLayer; + @@ -11982,7 +12473,7 @@ typedef void CAMetalLayer; - + @@ -12188,10 +12679,23 @@ typedef void CAMetalLayer; - + - - + + + + + + + + + + + + + + + @@ -12263,11 +12767,48 @@ typedef void CAMetalLayer; - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -12352,12 +12893,13 @@ typedef void CAMetalLayer; - + - - - - + + + + + @@ -12397,14 +12939,21 @@ typedef void CAMetalLayer; - + - - - - - - + + + + + + + + @@ -12419,10 +12968,16 @@ typedef void CAMetalLayer; - + - - + + + + + + + + @@ -12463,8 +13018,8 @@ typedef void CAMetalLayer; - - + + @@ -12484,20 +13039,80 @@ typedef void CAMetalLayer; - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - \ No newline at end of file + -- 2.7.4
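
The registry entries above add a host-side build path (vkBuildAccelerationStructureKHR taking VkAccelerationStructureBuildGeometryInfoKHR plus per-geometry VkAccelerationStructureBuildOffsetInfoKHR arrays). A minimal C sketch of one bottom-level build follows; it assumes the provisional VK_KHR_ray_tracing names of this header revision (guarded by VK_ENABLE_BETA_EXTENSIONS), that the enum and VK_STRUCTURE_TYPE_* spellings not visible in the diff match the generated header, that the destination structure is bound to host-visible memory, and that the rayTracingHostAccelerationStructureCommands feature is enabled.

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.h>

/* Host build of one bottom-level acceleration structure from an indexed
 * triangle mesh.  'blas' must have been created with a matching
 * VkAccelerationStructureCreateInfoKHR and be bound to host-visible memory;
 * 'scratch' must point to host scratch memory of sufficient size. */
static VkResult build_blas_on_host(VkDevice device,
                                   VkAccelerationStructureKHR blas,
                                   const float*    vertices,   /* xyz per vertex */
                                   const uint32_t* indices,
                                   uint32_t        triangleCount,
                                   void*           scratch)
{
    VkAccelerationStructureGeometryKHR geometry = {
        .sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
        .geometryType = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
        .geometry.triangles = {
            .sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR,
            .vertexFormat = VK_FORMAT_R32G32B32_SFLOAT,
            .vertexData   = { .hostAddress = vertices },
            .vertexStride = 3 * sizeof(float),
            .indexType    = VK_INDEX_TYPE_UINT32,
            .indexData    = { .hostAddress = indices },
            /* transformData left zero: no per-geometry transform */
        },
        .flags = VK_GEOMETRY_OPAQUE_BIT_KHR,
    };
    const VkAccelerationStructureGeometryKHR* pGeometry = &geometry;

    VkAccelerationStructureBuildGeometryInfoKHR buildInfo = {
        .sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
        .type  = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR,
        .flags = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
        .update = VK_FALSE,
        .dstAccelerationStructure = blas,
        .geometryArrayOfPointers  = VK_FALSE,
        .geometryCount = 1,
        .ppGeometries  = &pGeometry,
        .scratchData   = { .hostAddress = scratch },
    };

    /* One offset array per info; pOffsetInfos[i] has pInfos[i]->geometryCount
     * elements, here a single entry. */
    VkAccelerationStructureBuildOffsetInfoKHR offsetInfo = {
        .primitiveCount = triangleCount,
    };
    const VkAccelerationStructureBuildOffsetInfoKHR* pOffsetInfo = &offsetInfo;

    return vkBuildAccelerationStructureKHR(device, 1, &buildInfo, &pOffsetInfo);
}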
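
VK_KHR_deferred_host_operations lets these host commands be split across threads. The sketch below chains a VkDeferredOperationKHR into a host build and drives it to completion on a single thread; the VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR spelling, the VK_OPERATION_*/VK_THREAD_* result codes, and the assumption that VkAccelerationStructureBuildGeometryInfoKHR accepts VkDeferredOperationInfoKHR in its pNext chain (as the host copy info structures in this change do) are taken from the provisional headers rather than from the diff itself.

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.h>
#include <stddef.h>

/* Run a host acceleration structure build through a deferred operation.
 * Single-threaded join loop; a real application would hand the joins to a
 * thread pool sized by vkGetDeferredOperationMaxConcurrencyKHR. */
static VkResult build_deferred(VkDevice device,
                               VkAccelerationStructureBuildGeometryInfoKHR* buildInfo,
                               const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos)
{
    VkDeferredOperationKHR op;
    VkResult result = vkCreateDeferredOperationKHR(device, NULL, &op);
    if (result != VK_SUCCESS)
        return result;

    /* Request deferral by chaining the operation handle into the build info. */
    VkDeferredOperationInfoKHR deferInfo = {
        .sType = VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR,
        .pNext = buildInfo->pNext,
        .operationHandle = op,
    };
    buildInfo->pNext = &deferInfo;

    result = vkBuildAccelerationStructureKHR(device, 1, buildInfo, ppOffsetInfos);
    if (result == VK_OPERATION_DEFERRED_KHR) {
        /* Join until the operation completes on this thread.  Handling of
         * VK_THREAD_DONE_KHR (other threads finish the work) is omitted in
         * this single-threaded sketch. */
        do {
            result = vkDeferredOperationJoinKHR(device, op);
        } while (result == VK_THREAD_IDLE_KHR);
        if (result == VK_SUCCESS)
            result = vkGetDeferredOperationResultKHR(device, op);
    } else if (result == VK_OPERATION_NOT_DEFERRED_KHR) {
        result = VK_SUCCESS; /* driver chose to complete the build immediately */
    }

    vkDestroyDeferredOperationKHR(device, op, NULL);
    return result;
}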
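
The host copy commands and the serialization-size query combine into a serialize/deserialize round trip: vkWriteAccelerationStructuresPropertiesKHR with VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR sizes the blob, vkCopyAccelerationStructureToMemoryKHR writes it, vkGetDeviceAccelerationStructureCompatibilityKHR checks the leading 2*VK_UUID_SIZE version bytes, and vkCopyMemoryToAccelerationStructureKHR (mode DESERIALIZE) reloads it. The sketch assumes the SERIALIZE mode and the copy-to-memory VK_STRUCTURE_TYPE_* spellings from the provisional header, acceleration structures bound to host-visible memory, and the rayTracingHostAccelerationStructureCommands feature.

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.h>
#include <stdlib.h>

/* Serialize 'src' to host memory and deserialize it into 'dst' on the same
 * device.  'dst' must have been created compatibly with the serialized data. */
static VkResult serialize_and_reload(VkDevice device,
                                     VkAccelerationStructureKHR src,
                                     VkAccelerationStructureKHR dst)
{
    /* Host query: data points to a VkDeviceSize, stride is sizeof(VkDeviceSize). */
    VkDeviceSize size = 0;
    VkResult result = vkWriteAccelerationStructuresPropertiesKHR(
        device, 1, &src,
        VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR,
        sizeof(size), &size, sizeof(size));
    if (result != VK_SUCCESS)
        return result;

    uint8_t* blob = (uint8_t*)malloc((size_t)size);
    if (!blob)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    /* Serialize to host memory. */
    VkCopyAccelerationStructureToMemoryInfoKHR toMem = {
        .sType = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR,
        .src   = src,
        .dst   = { .hostAddress = blob },
        .mode  = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR,
    };
    result = vkCopyAccelerationStructureToMemoryKHR(device, &toMem);
    if (result != VK_SUCCESS) { free(blob); return result; }

    /* The first 2*VK_UUID_SIZE bytes of the blob identify the producing
     * driver; the target device must report them as compatible. */
    VkAccelerationStructureVersionKHR version = {
        .sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR,
        .versionData = blob,
    };
    result = vkGetDeviceAccelerationStructureCompatibilityKHR(device, &version);
    if (result != VK_SUCCESS) { free(blob); return result; }

    /* Deserialize back into 'dst'; mode must be DESERIALIZE. */
    VkCopyMemoryToAccelerationStructureInfoKHR fromMem = {
        .sType = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR,
        .src   = { .hostAddress = blob },
        .dst   = dst,
        .mode  = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR,
    };
    result = vkCopyMemoryToAccelerationStructureKHR(device, &fromMem);
    free(blob);
    return result;
}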
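
The capture/replay feature bits and vkGetRayTracingCaptureReplayShaderGroupHandlesKHR work together: at capture time the opaque handles are read back, and at replay time they are fed into VkRayTracingShaderGroupCreateInfoKHR::pShaderGroupCaptureReplayHandle when the pipeline is recreated with vkCreateRayTracingPipelinesKHR. A small sketch of the capture side, assuming the rayTracingShaderGroupHandleCaptureReplay feature is enabled:

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.h>
#include <stdlib.h>

/* Read back one capture-replay handle per shader group of a ray tracing
 * pipeline.  'captureReplaySize' is
 * VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleCaptureReplaySize. */
static void* capture_group_handles(VkDevice device,
                                   VkPipeline rayTracingPipeline,
                                   uint32_t groupCount,
                                   uint32_t captureReplaySize)
{
    size_t dataSize = (size_t)groupCount * captureReplaySize;
    void* data = malloc(dataSize);
    if (!data)
        return NULL;

    if (vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
            device, rayTracingPipeline, 0 /* firstGroup */, groupCount,
            dataSize, data) != VK_SUCCESS) {
        free(data);
        return NULL;
    }
    return data; /* caller persists this blob alongside the capture */
}

If captured groups are later mixed with groups that have no replay handle, the rayTracingShaderGroupHandleCaptureReplayMixed feature must also be enabled, which is exactly what VUID-VkPhysicalDeviceRayTracingFeaturesKHR-rayTracingShaderGroupHandleCaptureReplayMixed-03348 above encodes.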
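
vkCmdTraceRaysKHR takes its four shader binding table regions as VkStridedBufferRegionKHR (buffer, offset, stride, size). The sketch below records a trace from a single SBT buffer laid out as raygen record, then miss records, then hit records; that layout, the use of one shared stride, and passing a VK_NULL_HANDLE buffer for the unused callable table are assumptions of this example, not requirements stated in the diff. 'handleStride' is assumed to be shaderGroupHandleSize rounded up to shaderGroupBaseAlignment from VkPhysicalDeviceRayTracingPropertiesKHR.

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.h>

/* Record a ray trace over a width x height grid with depth 1. */
static void trace_rays(VkCommandBuffer cmd,
                       VkBuffer sbt,
                       VkDeviceSize handleStride,
                       uint32_t missCount,
                       uint32_t hitCount,
                       uint32_t width, uint32_t height)
{
    VkStridedBufferRegionKHR raygen = {
        .buffer = sbt, .offset = 0,
        .stride = handleStride, .size = handleStride,
    };
    VkStridedBufferRegionKHR miss = {
        .buffer = sbt, .offset = handleStride,
        .stride = handleStride, .size = missCount * handleStride,
    };
    VkStridedBufferRegionKHR hit = {
        .buffer = sbt, .offset = (1 + missCount) * handleStride,
        .stride = handleStride, .size = hitCount * handleStride,
    };
    VkStridedBufferRegionKHR callable = { .buffer = VK_NULL_HANDLE };

    vkCmdTraceRaysKHR(cmd, &raygen, &miss, &hit, &callable,
                      width, height, 1 /* depth */);
}

vkCmdTraceRaysIndirectKHR takes the same four regions but reads width/height/depth from a buffer (see VkTraceRaysIndirectCommandKHR above), gated by the rayTracingIndirectTraceRays feature.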
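
On the VK_NV_device_generated_commands side, the new flow is: size a preprocess buffer with vkGetGeneratedCommandsMemoryRequirementsNV, fill a VkGeneratedCommandsInfoNV with the token streams, and execute with vkCmdExecuteGeneratedCommandsNV (optionally preprocessing separately via vkCmdPreprocessGeneratedCommandsNV). A sketch, assuming the deviceGeneratedCommands feature is enabled and that 'pipeline' and 'layout' were created elsewhere for indirect binding; the VK_STRUCTURE_TYPE_* spellings are taken from the extension, not the diff.

#include <vulkan/vulkan.h>

/* Query preprocess-buffer requirements (setup time) and record execution of
 * device-generated commands (draw time). */
static void record_generated_commands(VkDevice device,
                                      VkCommandBuffer cmd,
                                      VkPipeline pipeline,
                                      VkIndirectCommandsLayoutNV layout,
                                      uint32_t sequencesCount,
                                      const VkIndirectCommandsStreamNV* streams,
                                      uint32_t streamCount,
                                      VkBuffer preprocessBuffer,
                                      VkDeviceSize preprocessSize)
{
    /* How large must preprocessBuffer be for up to 'sequencesCount' sequences? */
    VkGeneratedCommandsMemoryRequirementsInfoNV memInfo = {
        .sType = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
        .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
        .pipeline = pipeline,
        .indirectCommandsLayout = layout,
        .maxSequencesCount = sequencesCount,
    };
    VkMemoryRequirements2 memReqs = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
    };
    vkGetGeneratedCommandsMemoryRequirementsNV(device, &memInfo, &memReqs);
    /* memReqs.memoryRequirements.size must not exceed preprocessSize here. */

    VkGeneratedCommandsInfoNV genInfo = {
        .sType = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
        .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
        .pipeline = pipeline,
        .indirectCommandsLayout = layout,
        .streamCount = streamCount,
        .pStreams = streams,
        .sequencesCount = sequencesCount,
        .preprocessBuffer = preprocessBuffer,
        .preprocessOffset = 0,
        .preprocessSize = preprocessSize,
        /* sequencesCountBuffer / sequencesIndexBuffer left null: fixed count,
         * no reordering. */
    };

    /* isPreprocessed = VK_FALSE, so no separate preprocess pass (and no
     * command-preprocess barrier) is needed. */
    vkCmdExecuteGeneratedCommandsNV(cmd, VK_FALSE, &genInfo);
}

When several shader groups are baked into one graphics pipeline via VkGraphicsPipelineShaderGroupsCreateInfoNV, vkCmdBindPipelineShaderGroupNV selects among them by groupIndex before, or as part of, the generated sequences.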