#include <initializer_list>
#include <string>
#include <system_error>
+#include <tuple>
#include <type_traits>
#include <vulkan/vulkan.h>
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
# include <vector>
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-static_assert( VK_HEADER_VERSION == 34 , "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 37 , "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
# endif
#endif
+
+// VULKAN_HPP_INLINE: strongest always-inline spelling each compiler offers,
+// falling back to plain "inline". Overridable by pre-defining the macro.
+// NOTE: the Clang check must test __clang__ (two trailing underscores) --
+// "__clang___" is never defined, which silently routed Clang into the
+// __GNUC__ branch and skipped the __has_attribute(always_inline) guard.
+#if !defined(VULKAN_HPP_INLINE)
+# if defined(__clang__)
+# if __has_attribute(always_inline)
+# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
+# else
+# define VULKAN_HPP_INLINE inline
+# endif
+# elif defined(__GNUC__)
+# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
+# elif defined(_MSC_VER)
+# define VULKAN_HPP_INLINE __forceinline
+# else
+# define VULKAN_HPP_INLINE inline
+# endif
+#endif
+
namespace vk
{
+ // Default traits for a flag-bits enum: no bits defined (allFlags = 0), so
+ // Flags<BitType>::operator~ yields an empty mask unless a specialization
+ // (e.g. FlagTraits<CullModeFlagBits> below) enumerates the valid bits.
+ template <typename FlagBitsType> struct FlagTraits
+ {
+ enum { allFlags = 0 };
+ };
+
template <typename BitType, typename MaskType = VkFlags>
class Flags
{
return !m_mask;
}
+ // Bitwise NOT restricted to defined bits: XOR with
+ // FlagTraits<BitType>::allFlags complements the mask within the set of
+ // valid flags instead of setting undefined high bits.
+ Flags<BitType> operator~() const
+ {
+ Flags<BitType> result(*this);
+ result.m_mask ^= FlagTraits<BitType>::allFlags;
+ return result;
+ }
+
bool operator==(Flags<BitType> const& rhs) const
{
return m_mask == rhs.m_mask;
return flags ^ bit;
}
+
template <typename RefType>
class Optional
{
public:
Optional(RefType & reference) { m_ptr = &reference; }
+ Optional(RefType * ptr) { m_ptr = ptr; }
Optional(std::nullptr_t) { m_ptr = nullptr; }
operator RefType*() const { return m_ptr; }
eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV
};
- inline std::string to_string(Result value)
+ VULKAN_HPP_INLINE std::string to_string(Result value)
{
switch (value)
{
# undef noexcept
#endif
- inline const std::error_category& errorCategory()
+ VULKAN_HPP_INLINE const std::error_category& errorCategory()
{
static ErrorCategoryImpl instance;
return instance;
}
- inline std::error_code make_error_code(Result e)
+ VULKAN_HPP_INLINE std::error_code make_error_code(Result e)
{
return std::error_code(static_cast<int>(e), errorCategory());
}
- inline std::error_condition make_error_condition(Result e)
+ VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e)
{
return std::error_condition(static_cast<int>(e), errorCategory());
}
Result result;
T value;
+
+ operator std::tuple<Result&, T&>() { return std::tuple<Result&, T&>(result, value); }
};
template <typename T>
#endif
};
- inline ResultValueType<void>::type createResultValue( Result result, char const * message )
+ VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
assert( result == Result::eSuccess );
}
template <typename T>
- inline typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
+ VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
assert( result == Result::eSuccess );
#endif
}
- inline Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
+ VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
}
template <typename T>
- inline ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
+ VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
- inline FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 )
{
return FramebufferCreateFlags( bit0 ) | bit1;
}
using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
- inline QueryPoolCreateFlags operator|( QueryPoolCreateFlagBits bit0, QueryPoolCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE QueryPoolCreateFlags operator|( QueryPoolCreateFlagBits bit0, QueryPoolCreateFlagBits bit1 )
{
return QueryPoolCreateFlags( bit0 ) | bit1;
}
using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
- inline RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 )
{
return RenderPassCreateFlags( bit0 ) | bit1;
}
using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
- inline SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 )
{
return SamplerCreateFlags( bit0 ) | bit1;
}
using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
- inline PipelineLayoutCreateFlags operator|( PipelineLayoutCreateFlagBits bit0, PipelineLayoutCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineLayoutCreateFlags operator|( PipelineLayoutCreateFlagBits bit0, PipelineLayoutCreateFlagBits bit1 )
{
return PipelineLayoutCreateFlags( bit0 ) | bit1;
}
using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
- inline PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 )
{
return PipelineCacheCreateFlags( bit0 ) | bit1;
}
using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
- inline PipelineDepthStencilStateCreateFlags operator|( PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineDepthStencilStateCreateFlags operator|( PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 )
{
return PipelineDepthStencilStateCreateFlags( bit0 ) | bit1;
}
using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
- inline PipelineDynamicStateCreateFlags operator|( PipelineDynamicStateCreateFlagBits bit0, PipelineDynamicStateCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineDynamicStateCreateFlags operator|( PipelineDynamicStateCreateFlagBits bit0, PipelineDynamicStateCreateFlagBits bit1 )
{
return PipelineDynamicStateCreateFlags( bit0 ) | bit1;
}
using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
- inline PipelineColorBlendStateCreateFlags operator|( PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineColorBlendStateCreateFlags operator|( PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 )
{
return PipelineColorBlendStateCreateFlags( bit0 ) | bit1;
}
using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
- inline PipelineMultisampleStateCreateFlags operator|( PipelineMultisampleStateCreateFlagBits bit0, PipelineMultisampleStateCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineMultisampleStateCreateFlags operator|( PipelineMultisampleStateCreateFlagBits bit0, PipelineMultisampleStateCreateFlagBits bit1 )
{
return PipelineMultisampleStateCreateFlags( bit0 ) | bit1;
}
using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
- inline PipelineRasterizationStateCreateFlags operator|( PipelineRasterizationStateCreateFlagBits bit0, PipelineRasterizationStateCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineRasterizationStateCreateFlags operator|( PipelineRasterizationStateCreateFlagBits bit0, PipelineRasterizationStateCreateFlagBits bit1 )
{
return PipelineRasterizationStateCreateFlags( bit0 ) | bit1;
}
using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
- inline PipelineViewportStateCreateFlags operator|( PipelineViewportStateCreateFlagBits bit0, PipelineViewportStateCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineViewportStateCreateFlags operator|( PipelineViewportStateCreateFlagBits bit0, PipelineViewportStateCreateFlagBits bit1 )
{
return PipelineViewportStateCreateFlags( bit0 ) | bit1;
}
using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
- inline PipelineTessellationStateCreateFlags operator|( PipelineTessellationStateCreateFlagBits bit0, PipelineTessellationStateCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineTessellationStateCreateFlags operator|( PipelineTessellationStateCreateFlagBits bit0, PipelineTessellationStateCreateFlagBits bit1 )
{
return PipelineTessellationStateCreateFlags( bit0 ) | bit1;
}
using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
- inline PipelineInputAssemblyStateCreateFlags operator|( PipelineInputAssemblyStateCreateFlagBits bit0, PipelineInputAssemblyStateCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineInputAssemblyStateCreateFlags operator|( PipelineInputAssemblyStateCreateFlagBits bit0, PipelineInputAssemblyStateCreateFlagBits bit1 )
{
return PipelineInputAssemblyStateCreateFlags( bit0 ) | bit1;
}
using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
- inline PipelineVertexInputStateCreateFlags operator|( PipelineVertexInputStateCreateFlagBits bit0, PipelineVertexInputStateCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineVertexInputStateCreateFlags operator|( PipelineVertexInputStateCreateFlagBits bit0, PipelineVertexInputStateCreateFlagBits bit1 )
{
return PipelineVertexInputStateCreateFlags( bit0 ) | bit1;
}
using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
- inline PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 )
{
return PipelineShaderStageCreateFlags( bit0 ) | bit1;
}
using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
- inline DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 )
{
return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
}
using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
- inline BufferViewCreateFlags operator|( BufferViewCreateFlagBits bit0, BufferViewCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE BufferViewCreateFlags operator|( BufferViewCreateFlagBits bit0, BufferViewCreateFlagBits bit1 )
{
return BufferViewCreateFlags( bit0 ) | bit1;
}
using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
- inline InstanceCreateFlags operator|( InstanceCreateFlagBits bit0, InstanceCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE InstanceCreateFlags operator|( InstanceCreateFlagBits bit0, InstanceCreateFlagBits bit1 )
{
return InstanceCreateFlags( bit0 ) | bit1;
}
using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
- inline DeviceCreateFlags operator|( DeviceCreateFlagBits bit0, DeviceCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE DeviceCreateFlags operator|( DeviceCreateFlagBits bit0, DeviceCreateFlagBits bit1 )
{
return DeviceCreateFlags( bit0 ) | bit1;
}
using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
- inline DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 )
{
return DeviceQueueCreateFlags( bit0 ) | bit1;
}
using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
- inline ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 )
{
return ImageViewCreateFlags( bit0 ) | bit1;
}
using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
- inline SemaphoreCreateFlags operator|( SemaphoreCreateFlagBits bit0, SemaphoreCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE SemaphoreCreateFlags operator|( SemaphoreCreateFlagBits bit0, SemaphoreCreateFlagBits bit1 )
{
return SemaphoreCreateFlags( bit0 ) | bit1;
}
using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
- inline ShaderModuleCreateFlags operator|( ShaderModuleCreateFlagBits bit0, ShaderModuleCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE ShaderModuleCreateFlags operator|( ShaderModuleCreateFlagBits bit0, ShaderModuleCreateFlagBits bit1 )
{
return ShaderModuleCreateFlags( bit0 ) | bit1;
}
using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
- inline EventCreateFlags operator|( EventCreateFlagBits bit0, EventCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE EventCreateFlags operator|( EventCreateFlagBits bit0, EventCreateFlagBits bit1 )
{
return EventCreateFlags( bit0 ) | bit1;
}
using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;
- inline MemoryMapFlags operator|( MemoryMapFlagBits bit0, MemoryMapFlagBits bit1 )
+ VULKAN_HPP_INLINE MemoryMapFlags operator|( MemoryMapFlagBits bit0, MemoryMapFlagBits bit1 )
{
return MemoryMapFlags( bit0 ) | bit1;
}
using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;
- inline SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 )
+ VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 )
{
return SubpassDescriptionFlags( bit0 ) | bit1;
}
using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
- inline DescriptorPoolResetFlags operator|( DescriptorPoolResetFlagBits bit0, DescriptorPoolResetFlagBits bit1 )
+ VULKAN_HPP_INLINE DescriptorPoolResetFlags operator|( DescriptorPoolResetFlagBits bit0, DescriptorPoolResetFlagBits bit1 )
{
return DescriptorPoolResetFlags( bit0 ) | bit1;
}
using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;
- inline SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 )
+ VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 )
{
return SwapchainCreateFlagsKHR( bit0 ) | bit1;
}
using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
- inline DisplayModeCreateFlagsKHR operator|( DisplayModeCreateFlagBitsKHR bit0, DisplayModeCreateFlagBitsKHR bit1 )
+ VULKAN_HPP_INLINE DisplayModeCreateFlagsKHR operator|( DisplayModeCreateFlagBitsKHR bit0, DisplayModeCreateFlagBitsKHR bit1 )
{
return DisplayModeCreateFlagsKHR( bit0 ) | bit1;
}
using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
- inline DisplaySurfaceCreateFlagsKHR operator|( DisplaySurfaceCreateFlagBitsKHR bit0, DisplaySurfaceCreateFlagBitsKHR bit1 )
+ VULKAN_HPP_INLINE DisplaySurfaceCreateFlagsKHR operator|( DisplaySurfaceCreateFlagBitsKHR bit0, DisplaySurfaceCreateFlagBitsKHR bit1 )
{
return DisplaySurfaceCreateFlagsKHR( bit0 ) | bit1;
}
#ifdef VK_USE_PLATFORM_ANDROID_KHR
using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;
- inline AndroidSurfaceCreateFlagsKHR operator|( AndroidSurfaceCreateFlagBitsKHR bit0, AndroidSurfaceCreateFlagBitsKHR bit1 )
+ VULKAN_HPP_INLINE AndroidSurfaceCreateFlagsKHR operator|( AndroidSurfaceCreateFlagBitsKHR bit0, AndroidSurfaceCreateFlagBitsKHR bit1 )
{
return AndroidSurfaceCreateFlagsKHR( bit0 ) | bit1;
}
#ifdef VK_USE_PLATFORM_MIR_KHR
using MirSurfaceCreateFlagsKHR = Flags<MirSurfaceCreateFlagBitsKHR, VkMirSurfaceCreateFlagsKHR>;
- inline MirSurfaceCreateFlagsKHR operator|( MirSurfaceCreateFlagBitsKHR bit0, MirSurfaceCreateFlagBitsKHR bit1 )
+ VULKAN_HPP_INLINE MirSurfaceCreateFlagsKHR operator|( MirSurfaceCreateFlagBitsKHR bit0, MirSurfaceCreateFlagBitsKHR bit1 )
{
return MirSurfaceCreateFlagsKHR( bit0 ) | bit1;
}
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;
- inline WaylandSurfaceCreateFlagsKHR operator|( WaylandSurfaceCreateFlagBitsKHR bit0, WaylandSurfaceCreateFlagBitsKHR bit1 )
+ VULKAN_HPP_INLINE WaylandSurfaceCreateFlagsKHR operator|( WaylandSurfaceCreateFlagBitsKHR bit0, WaylandSurfaceCreateFlagBitsKHR bit1 )
{
return WaylandSurfaceCreateFlagsKHR( bit0 ) | bit1;
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;
- inline Win32SurfaceCreateFlagsKHR operator|( Win32SurfaceCreateFlagBitsKHR bit0, Win32SurfaceCreateFlagBitsKHR bit1 )
+ VULKAN_HPP_INLINE Win32SurfaceCreateFlagsKHR operator|( Win32SurfaceCreateFlagBitsKHR bit0, Win32SurfaceCreateFlagBitsKHR bit1 )
{
return Win32SurfaceCreateFlagsKHR( bit0 ) | bit1;
}
#ifdef VK_USE_PLATFORM_XLIB_KHR
using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;
- inline XlibSurfaceCreateFlagsKHR operator|( XlibSurfaceCreateFlagBitsKHR bit0, XlibSurfaceCreateFlagBitsKHR bit1 )
+ VULKAN_HPP_INLINE XlibSurfaceCreateFlagsKHR operator|( XlibSurfaceCreateFlagBitsKHR bit0, XlibSurfaceCreateFlagBitsKHR bit1 )
{
return XlibSurfaceCreateFlagsKHR( bit0 ) | bit1;
}
#ifdef VK_USE_PLATFORM_XCB_KHR
using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;
- inline XcbSurfaceCreateFlagsKHR operator|( XcbSurfaceCreateFlagBitsKHR bit0, XcbSurfaceCreateFlagBitsKHR bit1 )
+ VULKAN_HPP_INLINE XcbSurfaceCreateFlagsKHR operator|( XcbSurfaceCreateFlagBitsKHR bit0, XcbSurfaceCreateFlagBitsKHR bit1 )
{
return XcbSurfaceCreateFlagsKHR( bit0 ) | bit1;
}
};
static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+ // Handle wrapper for VkObjectTableNVX. Mirrors the other vk:: handle
+ // classes (cf. PipelineCache above): default-constructs to VK_NULL_HANDLE,
+ // supports ==/!=/< and boolean tests, and converts back to the native
+ // handle -- explicitly unless VULKAN_HPP_TYPESAFE_CONVERSION is defined.
+ class ObjectTableNVX
+ {
+ public:
+ ObjectTableNVX()
+ : m_objectTableNVX(VK_NULL_HANDLE)
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ // Implicit construction/assignment from the C handle only when the user
+ // opts in (see the 32-bit type-safety note near the top of this file).
+ ObjectTableNVX(VkObjectTableNVX objectTableNVX)
+ : m_objectTableNVX(objectTableNVX)
+ {}
+
+ ObjectTableNVX& operator=(VkObjectTableNVX objectTableNVX)
+ {
+ m_objectTableNVX = objectTableNVX;
+ return *this;
+ }
+#endif
+
+ bool operator==(ObjectTableNVX const &rhs) const
+ {
+ return m_objectTableNVX == rhs.m_objectTableNVX;
+ }
+
+ bool operator!=(ObjectTableNVX const &rhs) const
+ {
+ return m_objectTableNVX != rhs.m_objectTableNVX;
+ }
+
+ bool operator<(ObjectTableNVX const &rhs) const
+ {
+ return m_objectTableNVX < rhs.m_objectTableNVX;
+ }
+
+ // Conversion back to the C handle; explicit unless type-safe conversion
+ // was requested, to avoid accidental mixing of wrapper and raw handles.
+#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ explicit
+#endif
+ operator VkObjectTableNVX() const
+ {
+ return m_objectTableNVX;
+ }
+
+ // True when the handle is non-null.
+ explicit operator bool() const
+ {
+ return m_objectTableNVX != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_objectTableNVX == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkObjectTableNVX m_objectTableNVX;
+ };
+ static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
+
+ // Handle wrapper for VkIndirectCommandsLayoutNVX, following the same
+ // pattern as the other vk:: handle classes: null-initialized, ordered and
+ // equality-comparable, boolean-testable, and convertible to the native
+ // handle (explicitly unless VULKAN_HPP_TYPESAFE_CONVERSION is defined).
+ class IndirectCommandsLayoutNVX
+ {
+ public:
+ IndirectCommandsLayoutNVX()
+ : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ // Implicit construction/assignment from the C handle only when the user
+ // opts in (see the 32-bit type-safety note near the top of this file).
+ IndirectCommandsLayoutNVX(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
+ : m_indirectCommandsLayoutNVX(indirectCommandsLayoutNVX)
+ {}
+
+ IndirectCommandsLayoutNVX& operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
+ {
+ m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
+ return *this;
+ }
+#endif
+
+ bool operator==(IndirectCommandsLayoutNVX const &rhs) const
+ {
+ return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
+ }
+
+ bool operator!=(IndirectCommandsLayoutNVX const &rhs) const
+ {
+ return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
+ }
+
+ bool operator<(IndirectCommandsLayoutNVX const &rhs) const
+ {
+ return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
+ }
+
+ // Conversion back to the C handle; explicit unless type-safe conversion
+ // was requested, to avoid accidental mixing of wrapper and raw handles.
+#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ explicit
+#endif
+ operator VkIndirectCommandsLayoutNVX() const
+ {
+ return m_indirectCommandsLayoutNVX;
+ }
+
+ // True when the handle is non-null.
+ explicit operator bool() const
+ {
+ return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
+ };
+ static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
+
class DisplayKHR
{
public:
using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
- inline CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
+ VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
{
return CullModeFlags( bit0 ) | bit1;
}
+ // Complement of a single cull-mode bit; the result is masked to valid
+ // bits by Flags::operator~ via FlagTraits<CullModeFlagBits>::allFlags.
+ VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits )
+ {
+ return ~( CullModeFlags( bits ) );
+ }
+
+ // Enumerates every defined CullModeFlagBits value so Flags::operator~
+ // can restrict its result to defined bits only.
+ template <> struct FlagTraits<CullModeFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
+ };
+ };
+
enum class FrontFace
{
eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
- eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT
+ eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
+ eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
+ eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
+ eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
+ eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
+ eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
+ eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX
};
struct ApplicationInfo
static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- enum class SubpassContents
- {
- eInline = VK_SUBPASS_CONTENTS_INLINE,
- eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
- };
-
- struct PresentInfoKHR
+ struct DeviceGeneratedCommandsFeaturesNVX
{
- PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t swapchainCount_ = 0, const SwapchainKHR* pSwapchains_ = nullptr, const uint32_t* pImageIndices_ = nullptr, Result* pResults_ = nullptr )
- : sType( StructureType::ePresentInfoKHR )
+ DeviceGeneratedCommandsFeaturesNVX( Bool32 computeBindingPointSupport_ = 0 )
+ : sType( StructureType::eDeviceGeneratedCommandsFeaturesNVX )
, pNext( nullptr )
- , waitSemaphoreCount( waitSemaphoreCount_ )
- , pWaitSemaphores( pWaitSemaphores_ )
- , swapchainCount( swapchainCount_ )
- , pSwapchains( pSwapchains_ )
- , pImageIndices( pImageIndices_ )
- , pResults( pResults_ )
+ , computeBindingPointSupport( computeBindingPointSupport_ )
{
}
- PresentInfoKHR( VkPresentInfoKHR const & rhs )
+ DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
{
- memcpy( this, &rhs, sizeof(PresentInfoKHR) );
+ memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsFeaturesNVX) );
}
- PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs )
+ DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
{
- memcpy( this, &rhs, sizeof(PresentInfoKHR) );
+ memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsFeaturesNVX) );
return *this;
}
- PresentInfoKHR& setSType( StructureType sType_ )
+ DeviceGeneratedCommandsFeaturesNVX& setSType( StructureType sType_ )
{
sType = sType_;
return *this;
}
- PresentInfoKHR& setPNext( const void* pNext_ )
+ DeviceGeneratedCommandsFeaturesNVX& setPNext( const void* pNext_ )
{
pNext = pNext_;
return *this;
}
- PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+ DeviceGeneratedCommandsFeaturesNVX& setComputeBindingPointSupport( Bool32 computeBindingPointSupport_ )
{
- waitSemaphoreCount = waitSemaphoreCount_;
+ computeBindingPointSupport = computeBindingPointSupport_;
return *this;
}
- PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+ operator const VkDeviceGeneratedCommandsFeaturesNVX&() const
{
- pWaitSemaphores = pWaitSemaphores_;
+ return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>(this);
+ }
+
+ bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( computeBindingPointSupport == rhs.computeBindingPointSupport );
+ }
+
+ bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType;
+
+ public:
+ const void* pNext;
+ Bool32 computeBindingPointSupport;
+ };
+ static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
+
+ struct DeviceGeneratedCommandsLimitsNVX
+ {
+ DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, uint32_t maxObjectEntryCounts_ = 0, uint32_t minSequenceCountBufferOffsetAlignment_ = 0, uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 )
+ : sType( StructureType::eDeviceGeneratedCommandsLimitsNVX )
+ , pNext( nullptr )
+ , maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
+ , maxObjectEntryCounts( maxObjectEntryCounts_ )
+ , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
+ , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
+ , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
+ {
+ }
+
+ DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsLimitsNVX) );
+ }
+
+ DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsLimitsNVX) );
return *this;
}
- PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
+ DeviceGeneratedCommandsLimitsNVX& setSType( StructureType sType_ )
{
- swapchainCount = swapchainCount_;
+ sType = sType_;
return *this;
}
- PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ )
+ DeviceGeneratedCommandsLimitsNVX& setPNext( const void* pNext_ )
{
- pSwapchains = pSwapchains_;
+ pNext = pNext_;
return *this;
}
- PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ )
+ DeviceGeneratedCommandsLimitsNVX& setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ )
{
- pImageIndices = pImageIndices_;
+ maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
return *this;
}
- PresentInfoKHR& setPResults( Result* pResults_ )
+ DeviceGeneratedCommandsLimitsNVX& setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ )
{
- pResults = pResults_;
+ maxObjectEntryCounts = maxObjectEntryCounts_;
return *this;
}
- operator const VkPresentInfoKHR&() const
+ DeviceGeneratedCommandsLimitsNVX& setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ )
{
- return *reinterpret_cast<const VkPresentInfoKHR*>(this);
+ minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
+ return *this;
}
- bool operator==( PresentInfoKHR const& rhs ) const
+ DeviceGeneratedCommandsLimitsNVX& setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ )
+ {
+ minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
+ return *this;
+ }
+
+ DeviceGeneratedCommandsLimitsNVX& setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ )
+ {
+ minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
+ return *this;
+ }
+
+ operator const VkDeviceGeneratedCommandsLimitsNVX&() const
+ {
+ return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>(this);
+ }
+
+ bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
- && ( pWaitSemaphores == rhs.pWaitSemaphores )
- && ( swapchainCount == rhs.swapchainCount )
- && ( pSwapchains == rhs.pSwapchains )
- && ( pImageIndices == rhs.pImageIndices )
- && ( pResults == rhs.pResults );
+ && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
+ && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
+ && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
+ && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
+ && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
}
- bool operator!=( PresentInfoKHR const& rhs ) const
+ bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
{
return !operator==( rhs );
}
public:
const void* pNext;
- uint32_t waitSemaphoreCount;
- const Semaphore* pWaitSemaphores;
- uint32_t swapchainCount;
- const SwapchainKHR* pSwapchains;
- const uint32_t* pImageIndices;
- Result* pResults;
- };
- static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
-
- enum class DynamicState
- {
- eViewport = VK_DYNAMIC_STATE_VIEWPORT,
- eScissor = VK_DYNAMIC_STATE_SCISSOR,
- eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
- eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
- eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
- eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
- eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
- eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
- eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE
+ uint32_t maxIndirectCommandsLayoutTokenCount;
+ uint32_t maxObjectEntryCounts;
+ uint32_t minSequenceCountBufferOffsetAlignment;
+ uint32_t minSequenceIndexBufferOffsetAlignment;
+ uint32_t minCommandsTokenBufferOffsetAlignment;
};
+ static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
- struct PipelineDynamicStateCreateInfo
+ struct CmdReserveSpaceForCommandsInfoNVX
{
- PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), uint32_t dynamicStateCount_ = 0, const DynamicState* pDynamicStates_ = nullptr )
- : sType( StructureType::ePipelineDynamicStateCreateInfo )
+ CmdReserveSpaceForCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t maxSequencesCount_ = 0 )
+ : sType( StructureType::eCmdReserveSpaceForCommandsInfoNVX )
, pNext( nullptr )
- , flags( flags_ )
- , dynamicStateCount( dynamicStateCount_ )
- , pDynamicStates( pDynamicStates_ )
+ , objectTable( objectTable_ )
+ , indirectCommandsLayout( indirectCommandsLayout_ )
+ , maxSequencesCount( maxSequencesCount_ )
{
}
- PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs )
+ CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
{
- memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
+ memcpy( this, &rhs, sizeof(CmdReserveSpaceForCommandsInfoNVX) );
}
- PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs )
+ CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
{
- memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
+ memcpy( this, &rhs, sizeof(CmdReserveSpaceForCommandsInfoNVX) );
return *this;
}
- PipelineDynamicStateCreateInfo& setSType( StructureType sType_ )
+ CmdReserveSpaceForCommandsInfoNVX& setSType( StructureType sType_ )
{
sType = sType_;
return *this;
}
- PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ )
+ CmdReserveSpaceForCommandsInfoNVX& setPNext( const void* pNext_ )
{
pNext = pNext_;
return *this;
}
- PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ )
+ CmdReserveSpaceForCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
{
- flags = flags_;
+ objectTable = objectTable_;
return *this;
}
- PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ )
+ CmdReserveSpaceForCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
{
- dynamicStateCount = dynamicStateCount_;
+ indirectCommandsLayout = indirectCommandsLayout_;
return *this;
}
- PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ )
+ CmdReserveSpaceForCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
{
- pDynamicStates = pDynamicStates_;
+ maxSequencesCount = maxSequencesCount_;
return *this;
}
- operator const VkPipelineDynamicStateCreateInfo&() const
+ operator const VkCmdReserveSpaceForCommandsInfoNVX&() const
{
- return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>(this);
+ return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>(this);
}
- bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const
+ bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( dynamicStateCount == rhs.dynamicStateCount )
- && ( pDynamicStates == rhs.pDynamicStates );
+ && ( objectTable == rhs.objectTable )
+ && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+ && ( maxSequencesCount == rhs.maxSequencesCount );
}
- bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const
+ bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
{
return !operator==( rhs );
}
public:
const void* pNext;
- PipelineDynamicStateCreateFlags flags;
- uint32_t dynamicStateCount;
- const DynamicState* pDynamicStates;
+ ObjectTableNVX objectTable;
+ IndirectCommandsLayoutNVX indirectCommandsLayout;
+ uint32_t maxSequencesCount;
};
- static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
- enum class QueueFlagBits
+ enum class SubpassContents
{
- eGraphics = VK_QUEUE_GRAPHICS_BIT,
- eCompute = VK_QUEUE_COMPUTE_BIT,
- eTransfer = VK_QUEUE_TRANSFER_BIT,
- eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT
+ eInline = VK_SUBPASS_CONTENTS_INLINE,
+ eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
};
- using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
-
- inline QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 )
- {
- return QueueFlags( bit0 ) | bit1;
- }
-
- struct QueueFamilyProperties
+ struct PresentInfoKHR
{
- operator const VkQueueFamilyProperties&() const
- {
- return *reinterpret_cast<const VkQueueFamilyProperties*>(this);
- }
-
- bool operator==( QueueFamilyProperties const& rhs ) const
- {
+ PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t swapchainCount_ = 0, const SwapchainKHR* pSwapchains_ = nullptr, const uint32_t* pImageIndices_ = nullptr, Result* pResults_ = nullptr )
+ : sType( StructureType::ePresentInfoKHR )
+ , pNext( nullptr )
+ , waitSemaphoreCount( waitSemaphoreCount_ )
+ , pWaitSemaphores( pWaitSemaphores_ )
+ , swapchainCount( swapchainCount_ )
+ , pSwapchains( pSwapchains_ )
+ , pImageIndices( pImageIndices_ )
+ , pResults( pResults_ )
+ {
+ }
+
+ PresentInfoKHR( VkPresentInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(PresentInfoKHR) );
+ }
+
+ PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(PresentInfoKHR) );
+ return *this;
+ }
+
+ PresentInfoKHR& setSType( StructureType sType_ )
+ {
+ sType = sType_;
+ return *this;
+ }
+
+ PresentInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+ {
+ waitSemaphoreCount = waitSemaphoreCount_;
+ return *this;
+ }
+
+ PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+ {
+ pWaitSemaphores = pWaitSemaphores_;
+ return *this;
+ }
+
+ PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
+ {
+ swapchainCount = swapchainCount_;
+ return *this;
+ }
+
+ PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ )
+ {
+ pSwapchains = pSwapchains_;
+ return *this;
+ }
+
+ PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ )
+ {
+ pImageIndices = pImageIndices_;
+ return *this;
+ }
+
+ PresentInfoKHR& setPResults( Result* pResults_ )
+ {
+ pResults = pResults_;
+ return *this;
+ }
+
+ operator const VkPresentInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkPresentInfoKHR*>(this);
+ }
+
+ bool operator==( PresentInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+ && ( pWaitSemaphores == rhs.pWaitSemaphores )
+ && ( swapchainCount == rhs.swapchainCount )
+ && ( pSwapchains == rhs.pSwapchains )
+ && ( pImageIndices == rhs.pImageIndices )
+ && ( pResults == rhs.pResults );
+ }
+
+ bool operator!=( PresentInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType;
+
+ public:
+ const void* pNext;
+ uint32_t waitSemaphoreCount;
+ const Semaphore* pWaitSemaphores;
+ uint32_t swapchainCount;
+ const SwapchainKHR* pSwapchains;
+ const uint32_t* pImageIndices;
+ Result* pResults;
+ };
+ static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
+
+ enum class DynamicState
+ {
+ eViewport = VK_DYNAMIC_STATE_VIEWPORT,
+ eScissor = VK_DYNAMIC_STATE_SCISSOR,
+ eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
+ eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
+ eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
+ eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
+ eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
+ eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
+ eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE
+ };
+
+ struct PipelineDynamicStateCreateInfo
+ {
+ PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), uint32_t dynamicStateCount_ = 0, const DynamicState* pDynamicStates_ = nullptr )
+ : sType( StructureType::ePipelineDynamicStateCreateInfo )
+ , pNext( nullptr )
+ , flags( flags_ )
+ , dynamicStateCount( dynamicStateCount_ )
+ , pDynamicStates( pDynamicStates_ )
+ {
+ }
+
+ PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
+ }
+
+ PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
+ return *this;
+ }
+
+ PipelineDynamicStateCreateInfo& setSType( StructureType sType_ )
+ {
+ sType = sType_;
+ return *this;
+ }
+
+ PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ )
+ {
+ dynamicStateCount = dynamicStateCount_;
+ return *this;
+ }
+
+ PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ )
+ {
+ pDynamicStates = pDynamicStates_;
+ return *this;
+ }
+
+ operator const VkPipelineDynamicStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( dynamicStateCount == rhs.dynamicStateCount )
+ && ( pDynamicStates == rhs.pDynamicStates );
+ }
+
+ bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType;
+
+ public:
+ const void* pNext;
+ PipelineDynamicStateCreateFlags flags;
+ uint32_t dynamicStateCount;
+ const DynamicState* pDynamicStates;
+ };
+ static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
+
+ enum class QueueFlagBits
+ {
+ eGraphics = VK_QUEUE_GRAPHICS_BIT,
+ eCompute = VK_QUEUE_COMPUTE_BIT,
+ eTransfer = VK_QUEUE_TRANSFER_BIT,
+ eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT
+ };
+
+ using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
+
+ VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 )
+ {
+ return QueueFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits )
+ {
+ return ~( QueueFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<QueueFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding)
+ };
+ };
+
+ struct QueueFamilyProperties
+ {
+ operator const VkQueueFamilyProperties&() const
+ {
+ return *reinterpret_cast<const VkQueueFamilyProperties*>(this);
+ }
+
+ bool operator==( QueueFamilyProperties const& rhs ) const
+ {
return ( queueFlags == rhs.queueFlags )
&& ( queueCount == rhs.queueCount )
&& ( timestampValidBits == rhs.timestampValidBits )
using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
- inline MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 )
+ VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 )
{
return MemoryPropertyFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
+ {
+ return ~( MemoryPropertyFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<MemoryPropertyFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated)
+ };
+ };
+
struct MemoryType
{
operator const VkMemoryType&() const
using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
- inline MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 )
+ VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 )
{
return MemoryHeapFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits )
+ {
+ return ~( MemoryHeapFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<MemoryHeapFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal)
+ };
+ };
+
struct MemoryHeap
{
operator const VkMemoryHeap&() const
eHostRead = VK_ACCESS_HOST_READ_BIT,
eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
- eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT
+ eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
+ eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
+ eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX
};
using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
- inline AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 )
+ VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 )
{
return AccessFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits )
+ {
+ return ~( AccessFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<AccessFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX)
+ };
+ };
+
struct MemoryBarrier
{
MemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags() )
using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
- inline BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 )
+ VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 )
{
return BufferUsageFlags( bit0 ) | bit1;
}
- enum class BufferCreateFlagBits
+ VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits )
{
- eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
- eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+ return ~( BufferUsageFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<BufferUsageFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer)
+ };
+ };
+
+ enum class BufferCreateFlagBits
+ {
+ eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+ eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT
};
using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
- inline BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 )
{
return BufferCreateFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits )
+ {
+ return ~( BufferCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<BufferCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased)
+ };
+ };
+
struct BufferCreateInfo
{
BufferCreateInfo( BufferCreateFlags flags_ = BufferCreateFlags(), DeviceSize size_ = 0, BufferUsageFlags usage_ = BufferUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr )
using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
- inline ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 )
+ VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 )
{
return ShaderStageFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits )
+ {
+ return ~( ShaderStageFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ShaderStageFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll)
+ };
+ };
+
struct DescriptorSetLayoutBinding
{
DescriptorSetLayoutBinding( uint32_t binding_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0, ShaderStageFlags stageFlags_ = ShaderStageFlags(), const Sampler* pImmutableSamplers_ = nullptr )
using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
- inline ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 )
+ VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 )
{
return ImageUsageFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits )
+ {
+ return ~( ImageUsageFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ImageUsageFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment)
+ };
+ };
+
enum class ImageCreateFlagBits
{
eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
- inline ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 )
{
return ImageCreateFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits )
+ {
+ return ~( ImageCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ImageCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible)
+ };
+ };
+
enum class PipelineCreateFlagBits
{
eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
- inline PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 )
{
return PipelineCreateFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits )
+ {
+ return ~( PipelineCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<PipelineCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative)
+ };
+ };
+
struct ComputePipelineCreateInfo
{
ComputePipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), PipelineShaderStageCreateInfo stage_ = PipelineShaderStageCreateInfo(), PipelineLayout layout_ = PipelineLayout(), Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
- inline ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 )
+ VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 )
{
return ColorComponentFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits )
+ {
+ return ~( ColorComponentFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ColorComponentFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
+ };
+ };
+
struct PipelineColorBlendAttachmentState
{
PipelineColorBlendAttachmentState( Bool32 blendEnable_ = 0, BlendFactor srcColorBlendFactor_ = BlendFactor::eZero, BlendFactor dstColorBlendFactor_ = BlendFactor::eZero, BlendOp colorBlendOp_ = BlendOp::eAdd, BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero, BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero, BlendOp alphaBlendOp_ = BlendOp::eAdd, ColorComponentFlags colorWriteMask_ = ColorComponentFlags() )
using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
- inline FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
{
return FenceCreateFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits )
+ {
+ return ~( FenceCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<FenceCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
+ };
+ };
+
struct FenceCreateInfo
{
FenceCreateInfo( FenceCreateFlags flags_ = FenceCreateFlags() )
using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
- inline FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 )
+ VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 )
{
return FormatFeatureFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits )
+ {
+ return ~( FormatFeatureFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<FormatFeatureFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG)
+ };
+ };
+
struct FormatProperties
{
operator const VkFormatProperties&() const
using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
- inline QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 )
+ VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 )
{
return QueryControlFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits )
+ {
+ return ~( QueryControlFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<QueryControlFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(QueryControlFlagBits::ePrecise)
+ };
+ };
+
enum class QueryResultFlagBits
{
e64 = VK_QUERY_RESULT_64_BIT,
using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
- inline QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 )
+ VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 )
{
return QueryResultFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits )
+ {
+ return ~( QueryResultFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<QueryResultFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
+ };
+ };
+
enum class CommandBufferUsageFlagBits
{
eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
- inline CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 )
+ VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 )
{
return CommandBufferUsageFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits )
+ {
+ return ~( CommandBufferUsageFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<CommandBufferUsageFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
+ };
+ };
+
enum class QueryPipelineStatisticFlagBits
{
eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
- inline QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 )
+ VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 )
{
return QueryPipelineStatisticFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
+ {
+ return ~( QueryPipelineStatisticFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
+ };
+ };
+
struct CommandBufferInheritanceInfo
{
CommandBufferInheritanceInfo( RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Framebuffer framebuffer_ = Framebuffer(), Bool32 occlusionQueryEnable_ = 0, QueryControlFlags queryFlags_ = QueryControlFlags(), QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
- inline ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 )
+ VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 )
{
return ImageAspectFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits )
+ {
+ return ~( ImageAspectFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ImageAspectFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata)
+ };
+ };
+
struct ImageSubresource
{
ImageSubresource( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t arrayLayer_ = 0 )
using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
- inline SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 )
+ VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 )
{
return SparseImageFormatFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
+ {
+ return ~( SparseImageFormatFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<SparseImageFormatFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
+ };
+ };
+
struct SparseImageFormatProperties
{
operator const VkSparseImageFormatProperties&() const
using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
- inline SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 )
+ VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 )
{
return SparseMemoryBindFlags( bit0 ) | bit1;
}
+ VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
+ {
+ return ~( SparseMemoryBindFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<SparseMemoryBindFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
+ };
+ };
+
struct SparseMemoryBind
{
SparseMemoryBind( DeviceSize resourceOffset_ = 0, DeviceSize size_ = 0, DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
eHost = VK_PIPELINE_STAGE_HOST_BIT,
eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
- eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT
+ eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX
};
using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
- inline PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 )
+ VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 )
{
return PipelineStageFlags( bit0 ) | bit1;
}
+ // Complement masked by every defined stage bit, including the NVX addition.
+ VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits )
+ {
+ return ~( PipelineStageFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<PipelineStageFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX)
+ };
+ };
+
enum class CommandPoolCreateFlagBits
{
eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
- inline CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 )
{
return CommandPoolCreateFlags( bit0 ) | bit1;
}
+ // Complement restricted to the defined create bits via FlagTraits.
+ VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits )
+ {
+ return ~( CommandPoolCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<CommandPoolCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer)
+ };
+ };
+
struct CommandPoolCreateInfo
{
CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = CommandPoolCreateFlags(), uint32_t queueFamilyIndex_ = 0 )
using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
- inline CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 )
+ VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 )
{
return CommandPoolResetFlags( bit0 ) | bit1;
}
+ // Complement against the single defined bit (eReleaseResources).
+ VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits )
+ {
+ return ~( CommandPoolResetFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<CommandPoolResetFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
+ };
+ };
+
enum class CommandBufferResetFlagBits
{
eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
- inline CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 )
+ VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 )
{
return CommandBufferResetFlags( bit0 ) | bit1;
}
+ // Complement against the single defined bit (eReleaseResources).
+ VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits )
+ {
+ return ~( CommandBufferResetFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<CommandBufferResetFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
+ };
+ };
+
enum class SampleCountFlagBits
{
e1 = VK_SAMPLE_COUNT_1_BIT,
using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
- inline SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 )
+ VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 )
{
return SampleCountFlags( bit0 ) | bit1;
}
+ // Complement masked to the power-of-two sample counts 1..64.
+ VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits )
+ {
+ return ~( SampleCountFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<SampleCountFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
+ };
+ };
+
struct ImageFormatProperties
{
operator const VkImageFormatProperties&() const
using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
- inline AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 )
+ VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 )
{
return AttachmentDescriptionFlags( bit0 ) | bit1;
}
+ // Complement against the single defined bit (eMayAlias).
+ VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits )
+ {
+ return ~( AttachmentDescriptionFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<AttachmentDescriptionFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
+ };
+ };
+
struct AttachmentDescription
{
AttachmentDescription( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(), Format format_ = Format::eUndefined, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, ImageLayout initialLayout_ = ImageLayout::eUndefined, ImageLayout finalLayout_ = ImageLayout::eUndefined )
using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
- inline StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 )
+ VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 )
{
return StencilFaceFlags( bit0 ) | bit1;
}
+ // Complement restricted to front/back/front-and-back bits via FlagTraits.
+ VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits )
+ {
+ return ~( StencilFaceFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<StencilFaceFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eVkStencilFrontAndBack)
+ };
+ };
+
enum class DescriptorPoolCreateFlagBits
{
eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
- inline DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 )
+ VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 )
{
return DescriptorPoolCreateFlags( bit0 ) | bit1;
}
+ // Complement against the single defined bit (eFreeDescriptorSet).
+ VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits )
+ {
+ return ~( DescriptorPoolCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet)
+ };
+ };
+
struct DescriptorPoolCreateInfo
{
DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = DescriptorPoolCreateFlags(), uint32_t maxSets_ = 0, uint32_t poolSizeCount_ = 0, const DescriptorPoolSize* pPoolSizes_ = nullptr )
using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
- inline DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
+ VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
{
return DependencyFlags( bit0 ) | bit1;
}
- class CommandBuffer
+ // Complement against the single defined bit (eByRegion); the removed
+ // CommandBuffer class (interleaved '-' lines) continues past this chunk.
+ VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits )
{
- public:
- CommandBuffer()
- : m_commandBuffer(VK_NULL_HANDLE)
- {}
+ return ~( DependencyFlags( bits ) );
+ }
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- CommandBuffer(VkCommandBuffer commandBuffer)
- : m_commandBuffer(commandBuffer)
- {}
+ template <> struct FlagTraits<DependencyFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(DependencyFlagBits::eByRegion)
+ };
+ };
- CommandBuffer& operator=(VkCommandBuffer commandBuffer)
+ // Layout-compatible C++ wrapper for VkSubpassDependency with fluent setters;
+ // memcpy conversion relies on the static_assert at the end of the struct.
+ struct SubpassDependency
+ {
+ SubpassDependency( uint32_t srcSubpass_ = 0, uint32_t dstSubpass_ = 0, PipelineStageFlags srcStageMask_ = PipelineStageFlags(), PipelineStageFlags dstStageMask_ = PipelineStageFlags(), AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), DependencyFlags dependencyFlags_ = DependencyFlags() )
+ : srcSubpass( srcSubpass_ )
+ , dstSubpass( dstSubpass_ )
+ , srcStageMask( srcStageMask_ )
+ , dstStageMask( dstStageMask_ )
+ , srcAccessMask( srcAccessMask_ )
+ , dstAccessMask( dstAccessMask_ )
+ , dependencyFlags( dependencyFlags_ )
{
- m_commandBuffer = commandBuffer;
- return *this;
}
-#endif
- bool operator==(CommandBuffer const &rhs) const
+ SubpassDependency( VkSubpassDependency const & rhs )
{
- return m_commandBuffer == rhs.m_commandBuffer;
+ memcpy( this, &rhs, sizeof(SubpassDependency) );
}
- bool operator!=(CommandBuffer const &rhs) const
+ SubpassDependency& operator=( VkSubpassDependency const & rhs )
{
- return m_commandBuffer != rhs.m_commandBuffer;
+ memcpy( this, &rhs, sizeof(SubpassDependency) );
+ return *this;
}
- bool operator<(CommandBuffer const &rhs) const
+ SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ )
{
- return m_commandBuffer < rhs.m_commandBuffer;
+ srcSubpass = srcSubpass_;
+ return *this;
}
- Result begin( const CommandBufferBeginInfo* pBeginInfo ) const
+ SubpassDependency& setDstSubpass( uint32_t dstSubpass_ )
{
- return static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
+ dstSubpass = dstSubpass_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo ) const
+ SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ )
{
- Result result = static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
- return createResultValue( result, "vk::CommandBuffer::begin" );
+ srcStageMask = srcStageMask_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- Result end( ) const
+ SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ )
{
- return static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
+ dstStageMask = dstStageMask_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- ResultValueType<void>::type end() const
+ SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ )
{
- Result result = static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
- return createResultValue( result, "vk::CommandBuffer::end" );
+ srcAccessMask = srcAccessMask_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- Result reset( CommandBufferResetFlags flags ) const
+ SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ )
{
- return static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+ dstAccessMask = dstAccessMask_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- ResultValueType<void>::type reset( CommandBufferResetFlags flags ) const
+ SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ )
{
- Result result = static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
- return createResultValue( result, "vk::CommandBuffer::reset" );
+ dependencyFlags = dependencyFlags_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
+ operator const VkSubpassDependency&() const
{
- vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+ return *reinterpret_cast<const VkSubpassDependency*>(this);
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
+ bool operator==( SubpassDependency const& rhs ) const
{
- vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+ return ( srcSubpass == rhs.srcSubpass )
+ && ( dstSubpass == rhs.dstSubpass )
+ && ( srcStageMask == rhs.srcStageMask )
+ && ( dstStageMask == rhs.dstStageMask )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask )
+ && ( dependencyFlags == rhs.dependencyFlags );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports ) const
+ bool operator!=( SubpassDependency const& rhs ) const
{
- vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
+ return !operator==( rhs );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports ) const
+ uint32_t srcSubpass;
+ uint32_t dstSubpass;
+ PipelineStageFlags srcStageMask;
+ PipelineStageFlags dstStageMask;
+ AccessFlags srcAccessMask;
+ AccessFlags dstAccessMask;
+ DependencyFlags dependencyFlags;
+ };
+ static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
+
+ // Layout-compatible C++ wrapper for VkRenderPassCreateInfo; sType is fixed in
+ // the constructor and kept private so it cannot be corrupted accidentally.
+ struct RenderPassCreateInfo
+ {
+ RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), uint32_t attachmentCount_ = 0, const AttachmentDescription* pAttachments_ = nullptr, uint32_t subpassCount_ = 0, const SubpassDescription* pSubpasses_ = nullptr, uint32_t dependencyCount_ = 0, const SubpassDependency* pDependencies_ = nullptr )
+ : sType( StructureType::eRenderPassCreateInfo )
+ , pNext( nullptr )
+ , flags( flags_ )
+ , attachmentCount( attachmentCount_ )
+ , pAttachments( pAttachments_ )
+ , subpassCount( subpassCount_ )
+ , pSubpasses( pSubpasses_ )
+ , dependencyCount( dependencyCount_ )
+ , pDependencies( pDependencies_ )
{
- vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors ) const
+ RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs )
{
- vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
+ memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors ) const
+ RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs )
{
- vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
+ memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setLineWidth( float lineWidth ) const
+ RenderPassCreateInfo& setSType( StructureType sType_ )
{
- vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+ sType = sType_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setLineWidth( float lineWidth ) const
+ RenderPassCreateInfo& setPNext( const void* pNext_ )
{
- vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+ pNext = pNext_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
+ RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ )
{
- vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+ flags = flags_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
+ RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
{
- vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+ attachmentCount = attachmentCount_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setBlendConstants( const float blendConstants[4] ) const
+ RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ )
{
- vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+ pAttachments = pAttachments_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setBlendConstants( const float blendConstants[4] ) const
+ RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ )
{
- vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+ subpassCount = subpassCount_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
+ RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ )
{
- vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+ pSubpasses = pSubpasses_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
+ RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
{
- vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+ dependencyCount = dependencyCount_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
+ RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ )
{
- vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+ pDependencies = pDependencies_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
+ operator const VkRenderPassCreateInfo&() const
{
- vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+ return *reinterpret_cast<const VkRenderPassCreateInfo*>(this);
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
+ bool operator==( RenderPassCreateInfo const& rhs ) const
{
- vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( attachmentCount == rhs.attachmentCount )
+ && ( pAttachments == rhs.pAttachments )
+ && ( subpassCount == rhs.subpassCount )
+ && ( pSubpasses == rhs.pSubpasses )
+ && ( dependencyCount == rhs.dependencyCount )
+ && ( pDependencies == rhs.pDependencies );
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
+ bool operator!=( RenderPassCreateInfo const& rhs ) const
{
- vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+ return !operator==( rhs );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
+ private:
+ StructureType sType;
+
+ public:
+ const void* pNext;
+ RenderPassCreateFlags flags;
+ uint32_t attachmentCount;
+ const AttachmentDescription* pAttachments;
+ uint32_t subpassCount;
+ const SubpassDescription* pSubpasses;
+ uint32_t dependencyCount;
+ const SubpassDependency* pDependencies;
+ };
+ static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
+
+ // Scoped-enum mirror of VkPresentModeKHR.
+ enum class PresentModeKHR
+ {
+ eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+ eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+ eFifo = VK_PRESENT_MODE_FIFO_KHR,
+ eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR
+ };
+
+ // Scoped-enum mirror of VkColorSpaceKHR.
+ enum class ColorSpaceKHR
+ {
+ eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR
+ };
+
+ // Layout-compatible C++ wrapper for VkSurfaceFormatKHR (see static_assert below).
+ struct SurfaceFormatKHR
+ {
+ operator const VkSurfaceFormatKHR&() const
{
- vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+ return *reinterpret_cast<const VkSurfaceFormatKHR*>(this);
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
+ bool operator==( SurfaceFormatKHR const& rhs ) const
{
- vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+ return ( format == rhs.format )
+ && ( colorSpace == rhs.colorSpace );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const
+ bool operator!=( SurfaceFormatKHR const& rhs ) const
{
- vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
+ return !operator==( rhs );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets ) const
+ Format format;
+ ColorSpaceKHR colorSpace;
+ };
+ static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+
+ // Scoped-enum mirror of VkDisplayPlaneAlphaFlagBitsKHR, plus its Flags alias
+ // and the bitwise operators generated for every flag type.
+ enum class DisplayPlaneAlphaFlagBitsKHR
+ {
+ eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+ eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+ ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+ ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+ };
+
+ using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
+
+ VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 )
+ {
+ return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits )
+ {
+ return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
+ }
+
+ // Enumerates all defined display-plane alpha bits for masked complement.
+ template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+ {
+ enum
{
- vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
+ };
+ };
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
+ // Layout-compatible C++ wrapper for VkDisplayPlaneCapabilitiesKHR
+ // (read-only query result; see static_assert below).
+ struct DisplayPlaneCapabilitiesKHR
+ {
+ operator const VkDisplayPlaneCapabilitiesKHR&() const
{
- vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
+ return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>(this);
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
+ bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const
{
- vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
+ return ( supportedAlpha == rhs.supportedAlpha )
+ && ( minSrcPosition == rhs.minSrcPosition )
+ && ( maxSrcPosition == rhs.maxSrcPosition )
+ && ( minSrcExtent == rhs.minSrcExtent )
+ && ( maxSrcExtent == rhs.maxSrcExtent )
+ && ( minDstPosition == rhs.minDstPosition )
+ && ( maxDstPosition == rhs.maxDstPosition )
+ && ( minDstExtent == rhs.minDstExtent )
+ && ( maxDstExtent == rhs.maxDstExtent );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets ) const
+ bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const
{
- vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets );
+ return !operator==( rhs );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets ) const
- {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
- assert( buffers.size() == offsets.size() );
-#else
- if ( buffers.size() != offsets.size() )
- {
- throw std::logic_error( "vk::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
- }
-#endif // VULKAN_HPP_NO_EXCEPTIONS
- vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data() );
+ DisplayPlaneAlphaFlagsKHR supportedAlpha;
+ Offset2D minSrcPosition;
+ Offset2D maxSrcPosition;
+ Extent2D minSrcExtent;
+ Extent2D maxSrcExtent;
+ Offset2D minDstPosition;
+ Offset2D maxDstPosition;
+ Extent2D minDstExtent;
+ Extent2D maxDstExtent;
+ };
+ static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
+
+ // Scoped-enum mirror of VkCompositeAlphaFlagBitsKHR with Flags alias,
+ // bitwise operators, and the FlagTraits specialization for masked complement.
+ enum class CompositeAlphaFlagBitsKHR
+ {
+ eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+ ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+ ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+ eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
+ };
+
+ using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
+
+ VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 )
+ {
+ return CompositeAlphaFlagsKHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits )
+ {
+ return ~( CompositeAlphaFlagsKHR( bits ) );
+ }
+
+ template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
+ {
+ enum
+ {
+ allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
+ };
+ };
+
+ // Scoped-enum mirror of VkSurfaceTransformFlagBitsKHR with Flags alias,
+ // bitwise operators, and the FlagTraits specialization for masked complement.
+ enum class SurfaceTransformFlagBitsKHR
+ {
+ eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+ eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+ eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+ eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+ eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+ eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+ eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+ eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+ eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
+ };
+
+ using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
+
+ VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 )
+ {
+ return SurfaceTransformFlagsKHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits )
+ {
+ return ~( SurfaceTransformFlagsKHR( bits ) );
+ }
+
+ template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
+ {
+ enum
+ {
+ allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
+ };
+ };
+
+ // Layout-compatible C++ wrapper for VkDisplayPropertiesKHR
+ // (read-only query result; see static_assert below).
+ struct DisplayPropertiesKHR
+ {
+ operator const VkDisplayPropertiesKHR&() const
+ {
+ return *reinterpret_cast<const VkDisplayPropertiesKHR*>(this);
+ }
+
+ bool operator==( DisplayPropertiesKHR const& rhs ) const
+ {
+ return ( display == rhs.display )
+ && ( displayName == rhs.displayName )
+ && ( physicalDimensions == rhs.physicalDimensions )
+ && ( physicalResolution == rhs.physicalResolution )
+ && ( supportedTransforms == rhs.supportedTransforms )
+ && ( planeReorderPossible == rhs.planeReorderPossible )
+ && ( persistentContent == rhs.persistentContent );
+ }
+
+ bool operator!=( DisplayPropertiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DisplayKHR display;
+ const char* displayName;
+ Extent2D physicalDimensions;
+ Extent2D physicalResolution;
+ SurfaceTransformFlagsKHR supportedTransforms;
+ Bool32 planeReorderPossible;
+ Bool32 persistentContent;
+ };
+ static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+
+  // C++ wrapper around VkDisplaySurfaceCreateInfoKHR (VK_KHR_display) with
+  // chainable set*() methods. Layout-compatible with the C struct (see the
+  // static_assert below).
+  struct DisplaySurfaceCreateInfoKHR
+  {
+    // Default-constructs with the correct sType and a null pNext chain.
+    DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), DisplayModeKHR displayMode_ = DisplayModeKHR(), uint32_t planeIndex_ = 0, uint32_t planeStackIndex_ = 0, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = 0, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = Extent2D() )
+      : sType( StructureType::eDisplaySurfaceCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , displayMode( displayMode_ )
+      , planeIndex( planeIndex_ )
+      , planeStackIndex( planeStackIndex_ )
+      , transform( transform_ )
+      , globalAlpha( globalAlpha_ )
+      , alphaMode( alphaMode_ )
+      , imageExtent( imageExtent_ )
+    {
+    }
+
+    // Bitwise copy from the C struct; valid because the layouts are identical.
+    DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
+    }
+
+    DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
+      return *this;
+    }
+
+    // Fluent setters: each returns *this so calls can be chained.
+    DisplaySurfaceCreateInfoKHR& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ )
+    {
+      displayMode = displayMode_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ )
+    {
+      planeIndex = planeIndex_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ )
+    {
+      planeStackIndex = planeStackIndex_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ )
+    {
+      transform = transform_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ )
+    {
+      globalAlpha = globalAlpha_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ )
+    {
+      alphaMode = alphaMode_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    // Implicit conversion to the underlying C struct, for passing to the C API.
+    operator const VkDisplaySurfaceCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>(this);
+    }
+
+    bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( displayMode == rhs.displayMode )
+          && ( planeIndex == rhs.planeIndex )
+          && ( planeStackIndex == rhs.planeStackIndex )
+          && ( transform == rhs.transform )
+          && ( globalAlpha == rhs.globalAlpha )
+          && ( alphaMode == rhs.alphaMode )
+          && ( imageExtent == rhs.imageExtent );
+    }
+
+    bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  // sType is kept private so normal member access cannot corrupt it; use
+  // setSType() only if you really must override it.
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DisplaySurfaceCreateFlagsKHR flags;
+    DisplayModeKHR displayMode;
+    uint32_t planeIndex;
+    uint32_t planeStackIndex;
+    SurfaceTransformFlagBitsKHR transform;
+    float globalAlpha;
+    DisplayPlaneAlphaFlagBitsKHR alphaMode;
+    Extent2D imageExtent;
+  };
+  static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+
+  // C++ wrapper around VkSurfaceCapabilitiesKHR (output-only query result, so
+  // no constructors or setters are generated). Layout-compatible with the C
+  // struct, see the static_assert below.
+  struct SurfaceCapabilitiesKHR
+  {
+    // Implicit conversion to the underlying C struct, for passing to the C API.
+    operator const VkSurfaceCapabilitiesKHR&() const
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>(this);
+    }
+
+    bool operator==( SurfaceCapabilitiesKHR const& rhs ) const
+    {
+      return ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags );
+    }
+
+    bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t minImageCount;
+    uint32_t maxImageCount;
+    Extent2D currentExtent;
+    Extent2D minImageExtent;
+    Extent2D maxImageExtent;
+    uint32_t maxImageArrayLayers;
+    SurfaceTransformFlagsKHR supportedTransforms;
+    SurfaceTransformFlagBitsKHR currentTransform;
+    CompositeAlphaFlagsKHR supportedCompositeAlpha;
+    ImageUsageFlags supportedUsageFlags;
+  };
+  static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+
+  // C++ wrapper around VkSwapchainCreateInfoKHR (VK_KHR_swapchain) with
+  // chainable set*() methods. Layout-compatible with the C struct (see the
+  // static_assert below).
+  struct SwapchainCreateInfoKHR
+  {
+    // Default-constructs with the correct sType and a null pNext chain.
+    SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), SurfaceKHR surface_ = SurfaceKHR(), uint32_t minImageCount_ = 0, Format imageFormat_ = Format::eUndefined, ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, Extent2D imageExtent_ = Extent2D(), uint32_t imageArrayLayers_ = 0, ImageUsageFlags imageUsage_ = ImageUsageFlags(), SharingMode imageSharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, Bool32 clipped_ = 0, SwapchainKHR oldSwapchain_ = SwapchainKHR() )
+      : sType( StructureType::eSwapchainCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , surface( surface_ )
+      , minImageCount( minImageCount_ )
+      , imageFormat( imageFormat_ )
+      , imageColorSpace( imageColorSpace_ )
+      , imageExtent( imageExtent_ )
+      , imageArrayLayers( imageArrayLayers_ )
+      , imageUsage( imageUsage_ )
+      , imageSharingMode( imageSharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , preTransform( preTransform_ )
+      , compositeAlpha( compositeAlpha_ )
+      , presentMode( presentMode_ )
+      , clipped( clipped_ )
+      , oldSwapchain( oldSwapchain_ )
+    {
+    }
+
+    // Bitwise copy from the C struct; valid because the layouts are identical.
+    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
+    }
+
+    SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
+      return *this;
+    }
+
+    // Fluent setters: each returns *this so calls can be chained.
+    SwapchainCreateInfoKHR& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ )
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ )
+    {
+      minImageCount = minImageCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ )
+    {
+      imageFormat = imageFormat_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ )
+    {
+      imageColorSpace = imageColorSpace_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ )
+    {
+      imageArrayLayers = imageArrayLayers_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ )
+    {
+      imageUsage = imageUsage_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ )
+    {
+      imageSharingMode = imageSharingMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    // NOTE: stores the caller's pointer; the indices array must outlive this
+    // struct's use by the API.
+    SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ )
+    {
+      preTransform = preTransform_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ )
+    {
+      compositeAlpha = compositeAlpha_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ )
+    {
+      presentMode = presentMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ )
+    {
+      clipped = clipped_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ )
+    {
+      oldSwapchain = oldSwapchain_;
+      return *this;
+    }
+
+    // Implicit conversion to the underlying C struct, for passing to the C API.
+    operator const VkSwapchainCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>(this);
+    }
+
+    // NOTE(review): pQueueFamilyIndices is compared by pointer, not by the
+    // pointed-to index values.
+    bool operator==( SwapchainCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( surface == rhs.surface )
+          && ( minImageCount == rhs.minImageCount )
+          && ( imageFormat == rhs.imageFormat )
+          && ( imageColorSpace == rhs.imageColorSpace )
+          && ( imageExtent == rhs.imageExtent )
+          && ( imageArrayLayers == rhs.imageArrayLayers )
+          && ( imageUsage == rhs.imageUsage )
+          && ( imageSharingMode == rhs.imageSharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( preTransform == rhs.preTransform )
+          && ( compositeAlpha == rhs.compositeAlpha )
+          && ( presentMode == rhs.presentMode )
+          && ( clipped == rhs.clipped )
+          && ( oldSwapchain == rhs.oldSwapchain );
+    }
+
+    bool operator!=( SwapchainCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  // sType is kept private so normal member access cannot corrupt it.
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SwapchainCreateFlagsKHR flags;
+    SurfaceKHR surface;
+    uint32_t minImageCount;
+    Format imageFormat;
+    ColorSpaceKHR imageColorSpace;
+    Extent2D imageExtent;
+    uint32_t imageArrayLayers;
+    ImageUsageFlags imageUsage;
+    SharingMode imageSharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+    SurfaceTransformFlagBitsKHR preTransform;
+    CompositeAlphaFlagBitsKHR compositeAlpha;
+    PresentModeKHR presentMode;
+    Bool32 clipped;
+    SwapchainKHR oldSwapchain;
+  };
+  static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+
+  // Severity bits for VK_EXT_debug_report callbacks.
+  enum class DebugReportFlagBitsEXT
+  {
+    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+  };
+
+  // Type-safe bitmask of DebugReportFlagBitsEXT values.
+  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
+
+  // Combining two single bits yields the bitmask type.
+  VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 )
+  {
+    return DebugReportFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits )
+  {
+    return ~( DebugReportFlagsEXT( bits ) );
+  }
+
+  // allFlags masks Flags<>::operator~ so complementing never sets bits that
+  // are not valid for this enum.
+  template <> struct FlagTraits<DebugReportFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
+    };
+  };
+
+  // C++ wrapper around VkDebugReportCallbackCreateInfoEXT (VK_EXT_debug_report)
+  // with chainable set*() methods. Layout-compatible with the C struct (see
+  // the static_assert below).
+  struct DebugReportCallbackCreateInfoEXT
+  {
+    // Default-constructs with the correct sType and a null pNext chain.
+    DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, void* pUserData_ = nullptr )
+      : sType( StructureType::eDebugReportCallbackCreateInfoEXT )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , pfnCallback( pfnCallback_ )
+      , pUserData( pUserData_ )
+    {
+    }
+
+    // Bitwise copy from the C struct; valid because the layouts are identical.
+    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
+    }
+
+    DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
+      return *this;
+    }
+
+    // Fluent setters: each returns *this so calls can be chained.
+    DebugReportCallbackCreateInfoEXT& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ )
+    {
+      pfnCallback = pfnCallback_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ )
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    // Implicit conversion to the underlying C struct, for passing to the C API.
+    operator const VkDebugReportCallbackCreateInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>(this);
+    }
+
+    bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnCallback == rhs.pfnCallback )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  // sType is kept private so normal member access cannot corrupt it.
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DebugReportFlagsEXT flags;
+    PFN_vkDebugReportCallbackEXT pfnCallback;
+    void* pUserData;
+  };
+  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
+
+  // Identifies which kind of Vulkan object a VK_EXT_debug_report /
+  // VK_EXT_debug_marker message or name/tag refers to.
+  enum class DebugReportObjectTypeEXT
+  {
+    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
+    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
+    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+    eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
+    eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+    eDisplayKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
+    eDisplayModeKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
+    eObjectTableNvx = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,
+    eIndirectCommandsLayoutNvx = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT
+  };
+
+  // C++ wrapper around VkDebugMarkerObjectNameInfoEXT (VK_EXT_debug_marker):
+  // attaches a human-readable name to a Vulkan object handle. Layout-compatible
+  // with the C struct (see the static_assert below).
+  struct DebugMarkerObjectNameInfoEXT
+  {
+    // Default-constructs with the correct sType and a null pNext chain.
+    DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, const char* pObjectName_ = nullptr )
+      : sType( StructureType::eDebugMarkerObjectNameInfoEXT )
+      , pNext( nullptr )
+      , objectType( objectType_ )
+      , object( object_ )
+      , pObjectName( pObjectName_ )
+    {
+    }
+
+    // Bitwise copy from the C struct; valid because the layouts are identical.
+    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
+    }
+
+    DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
+      return *this;
+    }
+
+    // Fluent setters: each returns *this so calls can be chained.
+    DebugMarkerObjectNameInfoEXT& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ )
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+
+    // Implicit conversion to the underlying C struct, for passing to the C API.
+    operator const VkDebugMarkerObjectNameInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>(this);
+    }
+
+    // NOTE(review): pObjectName is compared by pointer, not by string contents.
+    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( pObjectName == rhs.pObjectName );
+    }
+
+    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  // sType is kept private so normal member access cannot corrupt it.
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DebugReportObjectTypeEXT objectType;
+    uint64_t object;
+    const char* pObjectName;
+  };
+  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
+
+  // C++ wrapper around VkDebugMarkerObjectTagInfoEXT (VK_EXT_debug_marker):
+  // attaches an arbitrary binary tag (pTag/tagSize) to a Vulkan object handle.
+  // Layout-compatible with the C struct (see the static_assert below).
+  struct DebugMarkerObjectTagInfoEXT
+  {
+    // Default-constructs with the correct sType and a null pNext chain.
+    DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr )
+      : sType( StructureType::eDebugMarkerObjectTagInfoEXT )
+      , pNext( nullptr )
+      , objectType( objectType_ )
+      , object( object_ )
+      , tagName( tagName_ )
+      , tagSize( tagSize_ )
+      , pTag( pTag_ )
+    {
+    }
+
+    // Bitwise copy from the C struct; valid because the layouts are identical.
+    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
+    }
+
+    DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
+      return *this;
+    }
+
+    // Fluent setters: each returns *this so calls can be chained.
+    DebugMarkerObjectTagInfoEXT& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ )
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ )
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ )
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    // NOTE: stores the caller's pointer; the tag data must stay alive while
+    // this struct is in use.
+    DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ )
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+    // Implicit conversion to the underlying C struct, for passing to the C API.
+    operator const VkDebugMarkerObjectTagInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>(this);
+    }
+
+    // NOTE(review): pTag is compared by pointer, not by the tag bytes.
+    bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+    }
+
+    bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  // sType is kept private so normal member access cannot corrupt it.
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DebugReportObjectTypeEXT objectType;
+    uint64_t object;
+    uint64_t tagName;
+    size_t tagSize;
+    const void* pTag;
+  };
+  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+
+  // Error codes reported through VK_EXT_debug_report.
+  enum class DebugReportErrorEXT
+  {
+    eNone = VK_DEBUG_REPORT_ERROR_NONE_EXT,
+    eCallbackRef = VK_DEBUG_REPORT_ERROR_CALLBACK_REF_EXT
+  };
+
+  // Rasterization ordering modes for VK_AMD_rasterization_order.
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  // C++ wrapper around VkPipelineRasterizationStateRasterizationOrderAMD
+  // (VK_AMD_rasterization_order), chained into a pipeline's rasterization
+  // state via pNext. Layout-compatible with the C struct (see the
+  // static_assert below).
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    // Default-constructs with the correct sType and a null pNext chain.
+    PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
+      : sType( StructureType::ePipelineRasterizationStateRasterizationOrderAMD )
+      , pNext( nullptr )
+      , rasterizationOrder( rasterizationOrder_ )
+    {
+    }
+
+    // Bitwise copy from the C struct; valid because the layouts are identical.
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
+      return *this;
+    }
+
+    // Fluent setters: each returns *this so calls can be chained.
+    PipelineRasterizationStateRasterizationOrderAMD& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
+    {
+      rasterizationOrder = rasterizationOrder_;
+      return *this;
+    }
+
+    // Implicit conversion to the underlying C struct, for passing to the C API.
+    operator const VkPipelineRasterizationStateRasterizationOrderAMD&() const
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
+    }
+
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rasterizationOrder == rhs.rasterizationOrder );
+    }
+
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  // sType is kept private so normal member access cannot corrupt it.
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    RasterizationOrderAMD rasterizationOrder;
+  };
+  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
+
+  // External memory handle kinds for VK_NV_external_memory_capabilities.
+  enum class ExternalMemoryHandleTypeFlagBitsNV
+  {
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+  };
+
+  // Type-safe bitmask of ExternalMemoryHandleTypeFlagBitsNV values.
+  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
+
+  // Combining two single bits yields the bitmask type.
+  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 )
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits )
+  {
+    return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
+  }
+
+  // allFlags masks Flags<>::operator~ so complementing never sets bits that
+  // are not valid for this enum.
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
+    };
+  };
+
+  // C++ wrapper around VkExternalMemoryImageCreateInfoNV
+  // (VK_NV_external_memory), chained into VkImageCreateInfo via pNext.
+  // Layout-compatible with the C struct (see the static_assert below).
+  struct ExternalMemoryImageCreateInfoNV
+  {
+    // Default-constructs with the correct sType and a null pNext chain.
+    ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
+      : sType( StructureType::eExternalMemoryImageCreateInfoNV )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    // Bitwise copy from the C struct; valid because the layouts are identical.
+    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
+    }
+
+    ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
+      return *this;
+    }
+
+    // Fluent setters: each returns *this so calls can be chained.
+    ExternalMemoryImageCreateInfoNV& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    // Implicit conversion to the underlying C struct, for passing to the C API.
+    operator const VkExternalMemoryImageCreateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>(this);
+    }
+
+    bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  // sType is kept private so normal member access cannot corrupt it.
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsNV handleTypes;
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
+
+  // C++ wrapper around VkExportMemoryAllocateInfoNV (VK_NV_external_memory),
+  // chained into VkMemoryAllocateInfo via pNext. Layout-compatible with the C
+  // struct (see the static_assert below).
+  struct ExportMemoryAllocateInfoNV
+  {
+    // Default-constructs with the correct sType and a null pNext chain.
+    ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
+      : sType( StructureType::eExportMemoryAllocateInfoNV )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    // Bitwise copy from the C struct; valid because the layouts are identical.
+    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
+    }
+
+    ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
+      return *this;
+    }
+
+    // Fluent setters: each returns *this so calls can be chained.
+    ExportMemoryAllocateInfoNV& setSType( StructureType sType_ )
+    {
+      sType = sType_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    // Implicit conversion to the underlying C struct, for passing to the C API.
+    operator const VkExportMemoryAllocateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>(this);
+    }
+
+    bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  // sType is kept private so normal member access cannot corrupt it.
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsNV handleTypes;
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ImportMemoryWin32HandleInfoNV
+ {
+ ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), HANDLE handle_ = 0 )
+ : sType( StructureType::eImportMemoryWin32HandleInfoNV )
+ , pNext( nullptr )
+ , handleType( handleType_ )
+ , handle( handle_ )
+ {
+ }
+
+ ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
+ }
+
+ ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
+ return *this;
+ }
+
+ ImportMemoryWin32HandleInfoNV& setSType( StructureType sType_ )
+ {
+ sType = sType_;
+ return *this;
+ }
+
+ ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ )
+ {
+ handle = handle_;
+ return *this;
+ }
+
+ operator const VkImportMemoryWin32HandleInfoNV&() const
+ {
+ return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>(this);
+ }
+
+ bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleType == rhs.handleType )
+ && ( handle == rhs.handle );
+ }
+
+ bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
+ private:
+ StructureType sType;
+
+ public:
+ const void* pNext;
+ ExternalMemoryHandleTypeFlagsNV handleType;
+ HANDLE handle;
+ };
+ static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  // Feature bits describing what an external-memory handle type supports
+  // (VK_NV_external_memory_capabilities).
+  enum class ExternalMemoryFeatureFlagBitsNV
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+  };
+
+  // Type-safe bitmask of ExternalMemoryFeatureFlagBitsNV values.
+  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
+
+  // Combining two single bits yields the bitmask type.
+  VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 )
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits )
+  {
+    return ~( ExternalMemoryFeatureFlagsNV( bits ) );
+  }
+
+ template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
+ {
+ enum
{
- vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+ allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
+ };
+ };
+
+ struct ExternalImageFormatPropertiesNV
+ {
+ operator const VkExternalImageFormatPropertiesNV&() const
+ {
+ return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>(this);
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
+ bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const
{
- vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+ return ( imageFormatProperties == rhs.imageFormatProperties )
+ && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+ && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+ && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
+ bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const
{
- vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+ return !operator==( rhs );
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
+ ImageFormatProperties imageFormatProperties;
+ ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
+ ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
+ ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
+ };
+ static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+
+  // Validation checks that can be disabled via VK_EXT_validation_flags.
+  enum class ValidationCheckEXT
+  {
+    eAll = VK_VALIDATION_CHECK_ALL_EXT
+  };
+
+ // C++ wrapper for VkValidationFlagsEXT. The static_assert after the struct checks it has the
+ // same size as the C struct, which is what the memcpy-based construction/assignment and the
+ // reinterpret_cast conversion operator below rely on.
+ struct ValidationFlagsEXT
+ {
+ ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, ValidationCheckEXT* pDisabledValidationChecks_ = nullptr )
+ : sType( StructureType::eValidationFlagsEXT )
+ , pNext( nullptr )
+ , disabledValidationCheckCount( disabledValidationCheckCount_ )
+ , pDisabledValidationChecks( pDisabledValidationChecks_ )
{
- vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+ ValidationFlagsEXT( VkValidationFlagsEXT const & rhs )
{
- vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+ memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+ ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs )
{
- vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+ memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+ ValidationFlagsEXT& setSType( StructureType sType_ )
{
- vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+ sType = sType_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+ ValidationFlagsEXT& setPNext( const void* pNext_ )
{
- vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+ pNext = pNext_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
+ ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ )
{
- vkCmdDispatch( m_commandBuffer, x, y, z );
+ disabledValidationCheckCount = disabledValidationCheckCount_;
+ return *this;
+ }
+
+ ValidationFlagsEXT& setPDisabledValidationChecks( ValidationCheckEXT* pDisabledValidationChecks_ )
+ {
+ pDisabledValidationChecks = pDisabledValidationChecks_;
+ return *this;
+ }
+
+ // Zero-copy view as the C struct; valid because wrapper and C struct have the same size (static_assert below).
+ operator const VkValidationFlagsEXT&() const
+ {
+ return *reinterpret_cast<const VkValidationFlagsEXT*>(this);
+ }
+
+ bool operator==( ValidationFlagsEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
+ && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+ }
+
+ bool operator!=( ValidationFlagsEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType;
+
+ public:
+ const void* pNext;
+ uint32_t disabledValidationCheckCount;
+ ValidationCheckEXT* pDisabledValidationChecks;
+ };
+ static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+
+ // Bit-flag enum for VK_NVX_device_generated_commands layout usage; combined via the Flags<> wrapper below.
+ enum class IndirectCommandsLayoutUsageFlagBitsNVX
+ {
+ eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
+ eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
+ eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
+ eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
+ };
+
+ using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
+
+ VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 )
+ {
+ return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits )
+ {
+ return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
+ }
+
+ // allFlags is the union of all valid bits; Flags<>::operator~ XORs against it so complement stays in-range.
+ template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
+ {
+ enum
+ {
+ allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences)
+ };
+ };
+
+ // Bit-flag enum marking an object-table entry as usable on the graphics and/or compute bind point.
+ enum class ObjectEntryUsageFlagBitsNVX
+ {
+ eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
+ eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
+ };
+
+ using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
+
+ VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 )
+ {
+ return ObjectEntryUsageFlagsNVX( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits )
+ {
+ return ~( ObjectEntryUsageFlagsNVX( bits ) );
+ }
+
+ // allFlags is the union of all valid bits; Flags<>::operator~ XORs against it so complement stays in-range.
+ template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
+ {
+ enum
+ {
+ allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute)
+ };
+ };
+
+ // Token kinds for device-generated command streams; used by IndirectCommandsTokenNVX / IndirectCommandsLayoutTokenNVX below.
+ enum class IndirectCommandsTokenTypeNVX
+ {
+ eVkIndirectCommandsTokenPipeline = VK_INDIRECT_COMMANDS_TOKEN_PIPELINE_NVX,
+ eVkIndirectCommandsTokenDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_DESCRIPTOR_SET_NVX,
+ eVkIndirectCommandsTokenIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_INDEX_BUFFER_NVX,
+ eVkIndirectCommandsTokenVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_VERTEX_BUFFER_NVX,
+ eVkIndirectCommandsTokenPushConstant = VK_INDIRECT_COMMANDS_TOKEN_PUSH_CONSTANT_NVX,
+ eVkIndirectCommandsTokenDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_DRAW_INDEXED_NVX,
+ eVkIndirectCommandsTokenDraw = VK_INDIRECT_COMMANDS_TOKEN_DRAW_NVX,
+ eVkIndirectCommandsTokenDispatch = VK_INDIRECT_COMMANDS_TOKEN_DISPATCH_NVX
+ };
+
+ // C++ wrapper for VkIndirectCommandsTokenNVX. Same size as the C struct (static_assert below),
+ // which is what the memcpy copy and reinterpret_cast conversion rely on.
+ struct IndirectCommandsTokenNVX
+ {
+ IndirectCommandsTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0 )
+ : tokenType( tokenType_ )
+ , buffer( buffer_ )
+ , offset( offset_ )
+ {
+ }
+
+ // Bit-copy construction from the C struct.
+ IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(IndirectCommandsTokenNVX) );
+ }
+
+ IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(IndirectCommandsTokenNVX) );
+ return *this;
+ }
+
+ IndirectCommandsTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
+ {
+ tokenType = tokenType_;
+ return *this;
+ }
+
+ IndirectCommandsTokenNVX& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ IndirectCommandsTokenNVX& setOffset( DeviceSize offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ // Zero-copy view as the C struct.
+ operator const VkIndirectCommandsTokenNVX&() const
+ {
+ return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>(this);
+ }
+
+ bool operator==( IndirectCommandsTokenNVX const& rhs ) const
+ {
+ return ( tokenType == rhs.tokenType )
+ && ( buffer == rhs.buffer )
+ && ( offset == rhs.offset );
+ }
+
+ bool operator!=( IndirectCommandsTokenNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ IndirectCommandsTokenTypeNVX tokenType;
+ Buffer buffer;
+ DeviceSize offset;
+ };
+ static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
+
+ // C++ wrapper for VkIndirectCommandsLayoutTokenNVX; size-checked against the C struct by the
+ // static_assert below (relied on by the memcpy copy and the reinterpret_cast conversion).
+ struct IndirectCommandsLayoutTokenNVX
+ {
+ IndirectCommandsLayoutTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, uint32_t bindingUnit_ = 0, uint32_t dynamicCount_ = 0, uint32_t divisor_ = 0 )
+ : tokenType( tokenType_ )
+ , bindingUnit( bindingUnit_ )
+ , dynamicCount( dynamicCount_ )
+ , divisor( divisor_ )
+ {
+ }
+
+ IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(IndirectCommandsLayoutTokenNVX) );
+ }
+
+ IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof(IndirectCommandsLayoutTokenNVX) );
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
+ {
+ tokenType = tokenType_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNVX& setBindingUnit( uint32_t bindingUnit_ )
+ {
+ bindingUnit = bindingUnit_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNVX& setDynamicCount( uint32_t dynamicCount_ )
+ {
+ dynamicCount = dynamicCount_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNVX& setDivisor( uint32_t divisor_ )
+ {
+ divisor = divisor_;
+ return *this;
+ }
+
+ operator const VkIndirectCommandsLayoutTokenNVX&() const
+ {
+ return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNVX*>(this);
+ }
+
+ bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const
+ {
+ return ( tokenType == rhs.tokenType )
+ && ( bindingUnit == rhs.bindingUnit )
+ && ( dynamicCount == rhs.dynamicCount )
+ && ( divisor == rhs.divisor );
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
+ bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const
{
- vkCmdDispatch( m_commandBuffer, x, y, z );
+ return !operator==( rhs );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
+ IndirectCommandsTokenTypeNVX tokenType;
+ uint32_t bindingUnit;
+ uint32_t dynamicCount;
+ uint32_t divisor;
+ };
+ static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
+
+ // C++ wrapper for VkIndirectCommandsLayoutCreateInfoNVX. sType is fixed by the default ctor and
+ // kept private; size parity with the C struct is checked by the static_assert below.
+ struct IndirectCommandsLayoutCreateInfoNVX
+ {
+ IndirectCommandsLayoutCreateInfoNVX( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, IndirectCommandsLayoutUsageFlagsNVX flags_ = IndirectCommandsLayoutUsageFlagsNVX(), uint32_t tokenCount_ = 0, const IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr )
+ : sType( StructureType::eIndirectCommandsLayoutCreateInfoNVX )
+ , pNext( nullptr )
+ , pipelineBindPoint( pipelineBindPoint_ )
+ , flags( flags_ )
+ , tokenCount( tokenCount_ )
+ , pTokens( pTokens_ )
{
- vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
+ IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
{
- vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
+ memcpy( this, &rhs, sizeof(IndirectCommandsLayoutCreateInfoNVX) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions ) const
+ IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
{
- vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
+ memcpy( this, &rhs, sizeof(IndirectCommandsLayoutCreateInfoNVX) );
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions ) const
+ IndirectCommandsLayoutCreateInfoNVX& setSType( StructureType sType_ )
{
- vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
+ sType = sType_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions ) const
+ IndirectCommandsLayoutCreateInfoNVX& setPNext( const void* pNext_ )
{
- vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
+ pNext = pNext_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions ) const
+ IndirectCommandsLayoutCreateInfoNVX& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
{
- vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
+ pipelineBindPoint = pipelineBindPoint_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter ) const
+ IndirectCommandsLayoutCreateInfoNVX& setFlags( IndirectCommandsLayoutUsageFlagsNVX flags_ )
{
- vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
+ flags = flags_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter ) const
+ IndirectCommandsLayoutCreateInfoNVX& setTokenCount( uint32_t tokenCount_ )
{
- vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
+ tokenCount = tokenCount_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions ) const
+ IndirectCommandsLayoutCreateInfoNVX& setPTokens( const IndirectCommandsLayoutTokenNVX* pTokens_ )
{
- vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+ pTokens = pTokens_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions ) const
+ operator const VkIndirectCommandsLayoutCreateInfoNVX&() const
{
- vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+ return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>(this);
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions ) const
+ bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
{
- vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pipelineBindPoint == rhs.pipelineBindPoint )
+ && ( flags == rhs.flags )
+ && ( tokenCount == rhs.tokenCount )
+ && ( pTokens == rhs.pTokens );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions ) const
+ bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
{
- vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+ return !operator==( rhs );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData ) const
+ private:
+ StructureType sType;
+
+ public:
+ const void* pNext;
+ PipelineBindPoint pipelineBindPoint;
+ IndirectCommandsLayoutUsageFlagsNVX flags;
+ uint32_t tokenCount;
+ const IndirectCommandsLayoutTokenNVX* pTokens;
+ };
+ static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
+
+ // Kind of resource registered in an NVX object table; used by ObjectTableCreateInfoNVX::pObjectEntryTypes below.
+ enum class ObjectEntryTypeNVX
+ {
+ eVkObjectEntryDescriptorSet = VK_OBJECT_ENTRY_DESCRIPTOR_SET_NVX,
+ eVkObjectEntryPipeline = VK_OBJECT_ENTRY_PIPELINE_NVX,
+ eVkObjectEntryIndexBuffer = VK_OBJECT_ENTRY_INDEX_BUFFER_NVX,
+ eVkObjectEntryVertexBuffer = VK_OBJECT_ENTRY_VERTEX_BUFFER_NVX,
+ eVkObjectEntryPushConstant = VK_OBJECT_ENTRY_PUSH_CONSTANT_NVX
+ };
+
+ // C++ wrapper for VkObjectTableCreateInfoNVX. sType is fixed by the default ctor and kept
+ // private; size parity with the C struct is checked by the static_assert below.
+ struct ObjectTableCreateInfoNVX
+ {
+ ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, const ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, const uint32_t* pObjectEntryCounts_ = nullptr, const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, uint32_t maxUniformBuffersPerDescriptor_ = 0, uint32_t maxStorageBuffersPerDescriptor_ = 0, uint32_t maxStorageImagesPerDescriptor_ = 0, uint32_t maxSampledImagesPerDescriptor_ = 0, uint32_t maxPipelineLayouts_ = 0 )
+ : sType( StructureType::eObjectTableCreateInfoNVX )
+ , pNext( nullptr )
+ , objectCount( objectCount_ )
+ , pObjectEntryTypes( pObjectEntryTypes_ )
+ , pObjectEntryCounts( pObjectEntryCounts_ )
+ , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
+ , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
+ , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
+ , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
+ , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
+ , maxPipelineLayouts( maxPipelineLayouts_ )
{
- vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, dataSize, pData );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T>
- void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data ) const
+ ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs )
{
- vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
+ memcpy( this, &rhs, sizeof(ObjectTableCreateInfoNVX) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
+ ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs )
{
- vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
+ memcpy( this, &rhs, sizeof(ObjectTableCreateInfoNVX) );
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
+ ObjectTableCreateInfoNVX& setSType( StructureType sType_ )
{
- vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
+ sType = sType_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
+ ObjectTableCreateInfoNVX& setPNext( const void* pNext_ )
{
- vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+ pNext = pNext_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges ) const
+ ObjectTableCreateInfoNVX& setObjectCount( uint32_t objectCount_ )
{
- vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+ objectCount = objectCount_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
+ ObjectTableCreateInfoNVX& setPObjectEntryTypes( const ObjectEntryTypeNVX* pObjectEntryTypes_ )
{
- vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+ pObjectEntryTypes = pObjectEntryTypes_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges ) const
+ ObjectTableCreateInfoNVX& setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ )
{
- vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+ pObjectEntryCounts = pObjectEntryCounts_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects ) const
+ ObjectTableCreateInfoNVX& setPObjectEntryUsageFlags( const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ )
{
- vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
+ pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects ) const
+ ObjectTableCreateInfoNVX& setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ )
{
- vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
+ maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions ) const
+ ObjectTableCreateInfoNVX& setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ )
{
- vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
+ maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions ) const
+ ObjectTableCreateInfoNVX& setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ )
{
- vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
+ maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setEvent( Event event, PipelineStageFlags stageMask ) const
+ ObjectTableCreateInfoNVX& setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ )
{
- vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void setEvent( Event event, PipelineStageFlags stageMask ) const
+ ObjectTableCreateInfoNVX& setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ )
{
- vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ maxPipelineLayouts = maxPipelineLayouts_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void resetEvent( Event event, PipelineStageFlags stageMask ) const
+ operator const VkObjectTableCreateInfoNVX&() const
{
- vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>(this);
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void resetEvent( Event event, PipelineStageFlags stageMask ) const
+ bool operator==( ObjectTableCreateInfoNVX const& rhs ) const
{
- vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( objectCount == rhs.objectCount )
+ && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
+ && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
+ && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
+ && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
+ && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
+ && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
+ && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
+ && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
+ bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const
{
- vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+ return !operator==( rhs );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
+ private:
+ StructureType sType;
+
+ public:
+ const void* pNext;
+ uint32_t objectCount;
+ const ObjectEntryTypeNVX* pObjectEntryTypes;
+ const uint32_t* pObjectEntryCounts;
+ const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
+ uint32_t maxUniformBuffersPerDescriptor;
+ uint32_t maxStorageBuffersPerDescriptor;
+ uint32_t maxStorageImagesPerDescriptor;
+ uint32_t maxSampledImagesPerDescriptor;
+ uint32_t maxPipelineLayouts;
+ };
+ static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
+
+ // C++ wrapper for VkObjectTableEntryNVX; size-checked against the C struct by the static_assert
+ // below (relied on by the memcpy copy and the reinterpret_cast conversion).
+ struct ObjectTableEntryNVX
+ {
+ ObjectTableEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX() )
+ : type( type_ )
+ , flags( flags_ )
{
- vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
+ ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs )
{
- vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+ memcpy( this, &rhs, sizeof(ObjectTableEntryNVX) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
+ ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs )
{
- vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
+ memcpy( this, &rhs, sizeof(ObjectTableEntryNVX) );
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
+ ObjectTableEntryNVX& setType( ObjectEntryTypeNVX type_ )
{
- vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+ type = type_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
+ ObjectTableEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
{
- vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+ flags = flags_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void endQuery( QueryPool queryPool, uint32_t query ) const
+ operator const VkObjectTableEntryNVX&() const
{
- vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+ return *reinterpret_cast<const VkObjectTableEntryNVX*>(this);
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void endQuery( QueryPool queryPool, uint32_t query ) const
+ bool operator==( ObjectTableEntryNVX const& rhs ) const
{
- vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+ return ( type == rhs.type )
+ && ( flags == rhs.flags );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
+ bool operator!=( ObjectTableEntryNVX const& rhs ) const
{
- vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+ return !operator==( rhs );
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ };
+ static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
+
+ // C++ wrapper for VkObjectTablePipelineEntryNVX (VK_NVX_device_generated_commands):
+ // describes a Pipeline to register in an NVX object table. The static_assert below
+ // pins layout compatibility with the C struct, which is what makes the memcpy-based
+ // copy/assignment and the reinterpret_cast conversion operator valid.
+ // NOTE(review): the generator defaults type_ to eVkObjectEntryDescriptorSet even for
+ // this pipeline entry - callers should set the type explicitly; confirm against vk.xml.
+ struct ObjectTablePipelineEntryNVX
+ {
+ ObjectTablePipelineEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Pipeline pipeline_ = Pipeline() )
+ : type( type_ )
+ , flags( flags_ )
+ , pipeline( pipeline_ )
{
- vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
+ ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs )
{
- vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+ memcpy( this, &rhs, sizeof(ObjectTablePipelineEntryNVX) );
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
+ ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs )
{
- vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+ memcpy( this, &rhs, sizeof(ObjectTablePipelineEntryNVX) );
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
+ ObjectTablePipelineEntryNVX& setType( ObjectEntryTypeNVX type_ )
{
- vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
+ type = type_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
+ ObjectTablePipelineEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
{
- vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
+ flags = flags_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const
+ ObjectTablePipelineEntryNVX& setPipeline( Pipeline pipeline_ )
{
- vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
+ pipeline = pipeline_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T>
- void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values ) const
+ operator const VkObjectTablePipelineEntryNVX&() const
{
- vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
+ return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>(this);
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents ) const
+ bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const
{
- vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+ return ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( pipeline == rhs.pipeline );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents ) const
+ bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const
{
- vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+ return !operator==( rhs );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void nextSubpass( SubpassContents contents ) const
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ Pipeline pipeline;
+ };
+ static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
+
+ // C++ wrapper for VkObjectTableDescriptorSetEntryNVX (VK_NVX_device_generated_commands):
+ // registers a DescriptorSet (bound via the given PipelineLayout) in an NVX object table.
+ // Layout-compatible with the C struct (see static_assert below), so the memcpy-based
+ // copy/assignment and the reinterpret_cast conversion operator are valid.
+ struct ObjectTableDescriptorSetEntryNVX
+ {
+ ObjectTableDescriptorSetEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), DescriptorSet descriptorSet_ = DescriptorSet() )
+ : type( type_ )
+ , flags( flags_ )
+ , pipelineLayout( pipelineLayout_ )
+ , descriptorSet( descriptorSet_ )
{
- vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void nextSubpass( SubpassContents contents ) const
+ ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs )
{
- vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+ memcpy( this, &rhs, sizeof(ObjectTableDescriptorSetEntryNVX) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void endRenderPass( ) const
+ ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs )
{
- vkCmdEndRenderPass( m_commandBuffer );
+ memcpy( this, &rhs, sizeof(ObjectTableDescriptorSetEntryNVX) );
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void endRenderPass() const
+ ObjectTableDescriptorSetEntryNVX& setType( ObjectEntryTypeNVX type_ )
{
- vkCmdEndRenderPass( m_commandBuffer );
+ type = type_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
+ ObjectTableDescriptorSetEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
{
- vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+ flags = flags_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void executeCommands( ArrayProxy<const CommandBuffer> commandBuffers ) const
+ ObjectTableDescriptorSetEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
{
- vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+ pipelineLayout = pipelineLayout_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void debugMarkerBeginEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
+ ObjectTableDescriptorSetEntryNVX& setDescriptorSet( DescriptorSet descriptorSet_ )
{
- vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
+ descriptorSet = descriptorSet_;
+ return *this;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- DebugMarkerMarkerInfoEXT debugMarkerBeginEXT() const
+ operator const VkObjectTableDescriptorSetEntryNVX&() const
{
- DebugMarkerMarkerInfoEXT markerInfo;
- vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
- return markerInfo;
+ return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>(this);
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void debugMarkerEndEXT( ) const
+ bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const
{
- vkCmdDebugMarkerEndEXT( m_commandBuffer );
+ return ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( pipelineLayout == rhs.pipelineLayout )
+ && ( descriptorSet == rhs.descriptorSet );
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void debugMarkerEndEXT() const
+ bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const
{
- vkCmdDebugMarkerEndEXT( m_commandBuffer );
+ return !operator==( rhs );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- void debugMarkerInsertEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ PipelineLayout pipelineLayout;
+ DescriptorSet descriptorSet;
+ };
+ static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
+
+ // C++ wrapper for VkObjectTableVertexBufferEntryNVX (VK_NVX_device_generated_commands):
+ // registers a Buffer used as a vertex buffer in an NVX object table. Layout-compatible
+ // with the C struct (see static_assert below), so memcpy copy/assignment and the
+ // reinterpret_cast conversion operator are valid.
+ // NOTE(review): default type_ is eVkObjectEntryDescriptorSet, not a vertex-buffer
+ // entry type - callers should set the type explicitly; confirm against vk.xml.
+ struct ObjectTableVertexBufferEntryNVX
+ {
+ ObjectTableVertexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() )
+ : type( type_ )
+ , flags( flags_ )
+ , buffer( buffer_ )
{
- vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- DebugMarkerMarkerInfoEXT debugMarkerInsertEXT() const
+ ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs )
{
- DebugMarkerMarkerInfoEXT markerInfo;
- vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
- return markerInfo;
+ memcpy( this, &rhs, sizeof(ObjectTableVertexBufferEntryNVX) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+ ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs )
{
- vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+ memcpy( this, &rhs, sizeof(ObjectTableVertexBufferEntryNVX) );
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+ ObjectTableVertexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
{
- vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+ type = type_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+ ObjectTableVertexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
{
- vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+ flags = flags_;
+ return *this;
}
-#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+ ObjectTableVertexBufferEntryNVX& setBuffer( Buffer buffer_ )
{
- vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+ buffer = buffer_;
+ return *this;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- explicit
-#endif
- operator VkCommandBuffer() const
+ operator const VkObjectTableVertexBufferEntryNVX&() const
{
- return m_commandBuffer;
+ return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>(this);
}
- explicit operator bool() const
+ bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const
{
- return m_commandBuffer != VK_NULL_HANDLE;
+ return ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( buffer == rhs.buffer );
}
- bool operator!() const
+ bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const
{
- return m_commandBuffer == VK_NULL_HANDLE;
+ return !operator==( rhs );
}
- private:
- VkCommandBuffer m_commandBuffer;
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ Buffer buffer;
};
- static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+ static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
- struct SubpassDependency
+ // C++ wrapper for VkObjectTableIndexBufferEntryNVX (VK_NVX_device_generated_commands):
+ // registers a Buffer used as an index buffer in an NVX object table. Layout-compatible
+ // with the C struct (see static_assert below), so memcpy copy/assignment and the
+ // reinterpret_cast conversion operator are valid.
+ struct ObjectTableIndexBufferEntryNVX
{
- SubpassDependency( uint32_t srcSubpass_ = 0, uint32_t dstSubpass_ = 0, PipelineStageFlags srcStageMask_ = PipelineStageFlags(), PipelineStageFlags dstStageMask_ = PipelineStageFlags(), AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), DependencyFlags dependencyFlags_ = DependencyFlags() )
- : srcSubpass( srcSubpass_ )
- , dstSubpass( dstSubpass_ )
- , srcStageMask( srcStageMask_ )
- , dstStageMask( dstStageMask_ )
- , srcAccessMask( srcAccessMask_ )
- , dstAccessMask( dstAccessMask_ )
- , dependencyFlags( dependencyFlags_ )
+ ObjectTableIndexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() )
+ : type( type_ )
+ , flags( flags_ )
+ , buffer( buffer_ )
{
}
- SubpassDependency( VkSubpassDependency const & rhs )
+ ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs )
{
- memcpy( this, &rhs, sizeof(SubpassDependency) );
+ memcpy( this, &rhs, sizeof(ObjectTableIndexBufferEntryNVX) );
}
- SubpassDependency& operator=( VkSubpassDependency const & rhs )
+ ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs )
{
- memcpy( this, &rhs, sizeof(SubpassDependency) );
+ memcpy( this, &rhs, sizeof(ObjectTableIndexBufferEntryNVX) );
return *this;
}
- SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ )
+ ObjectTableIndexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
{
- srcSubpass = srcSubpass_;
+ type = type_;
return *this;
}
- SubpassDependency& setDstSubpass( uint32_t dstSubpass_ )
+ ObjectTableIndexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
{
- dstSubpass = dstSubpass_;
+ flags = flags_;
return *this;
}
- SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ )
+ ObjectTableIndexBufferEntryNVX& setBuffer( Buffer buffer_ )
{
- srcStageMask = srcStageMask_;
+ buffer = buffer_;
return *this;
}
- SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ )
+ operator const VkObjectTableIndexBufferEntryNVX&() const
{
- dstStageMask = dstStageMask_;
- return *this;
+ return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>(this);
}
- SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ )
+ bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const
{
- srcAccessMask = srcAccessMask_;
- return *this;
+ return ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( buffer == rhs.buffer );
}
- SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ )
+ bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const
{
- dstAccessMask = dstAccessMask_;
- return *this;
+ return !operator==( rhs );
}
- SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ )
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ Buffer buffer;
+ };
+ static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
+
+ // C++ wrapper for VkObjectTablePushConstantEntryNVX (VK_NVX_device_generated_commands):
+ // registers a push-constant range (PipelineLayout + ShaderStageFlags) in an NVX object
+ // table. Layout-compatible with the C struct (see static_assert below), so memcpy
+ // copy/assignment and the reinterpret_cast conversion operator are valid.
+ struct ObjectTablePushConstantEntryNVX
+ {
+ ObjectTablePushConstantEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), ShaderStageFlags stageFlags_ = ShaderStageFlags() )
+ : type( type_ )
+ , flags( flags_ )
+ , pipelineLayout( pipelineLayout_ )
+ , stageFlags( stageFlags_ )
{
- dependencyFlags = dependencyFlags_;
- return *this;
}
- operator const VkSubpassDependency&() const
+ ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs )
{
- return *reinterpret_cast<const VkSubpassDependency*>(this);
+ memcpy( this, &rhs, sizeof(ObjectTablePushConstantEntryNVX) );
}
- bool operator==( SubpassDependency const& rhs ) const
+ ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs )
{
- return ( srcSubpass == rhs.srcSubpass )
- && ( dstSubpass == rhs.dstSubpass )
- && ( srcStageMask == rhs.srcStageMask )
- && ( dstStageMask == rhs.dstStageMask )
- && ( srcAccessMask == rhs.srcAccessMask )
- && ( dstAccessMask == rhs.dstAccessMask )
- && ( dependencyFlags == rhs.dependencyFlags );
+ memcpy( this, &rhs, sizeof(ObjectTablePushConstantEntryNVX) );
+ return *this;
}
- bool operator!=( SubpassDependency const& rhs ) const
+ ObjectTablePushConstantEntryNVX& setType( ObjectEntryTypeNVX type_ )
{
- return !operator==( rhs );
+ type = type_;
+ return *this;
}
- uint32_t srcSubpass;
- uint32_t dstSubpass;
- PipelineStageFlags srcStageMask;
- PipelineStageFlags dstStageMask;
- AccessFlags srcAccessMask;
- AccessFlags dstAccessMask;
- DependencyFlags dependencyFlags;
- };
- static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
-
- struct RenderPassCreateInfo
- {
- RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), uint32_t attachmentCount_ = 0, const AttachmentDescription* pAttachments_ = nullptr, uint32_t subpassCount_ = 0, const SubpassDescription* pSubpasses_ = nullptr, uint32_t dependencyCount_ = 0, const SubpassDependency* pDependencies_ = nullptr )
- : sType( StructureType::eRenderPassCreateInfo )
- , pNext( nullptr )
- , flags( flags_ )
- , attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
- , subpassCount( subpassCount_ )
- , pSubpasses( pSubpasses_ )
- , dependencyCount( dependencyCount_ )
- , pDependencies( pDependencies_ )
+ ObjectTablePushConstantEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
{
+ flags = flags_;
+ return *this;
}
- RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs )
+ ObjectTablePushConstantEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
{
- memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
+ pipelineLayout = pipelineLayout_;
+ return *this;
}
- RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs )
+ ObjectTablePushConstantEntryNVX& setStageFlags( ShaderStageFlags stageFlags_ )
{
- memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
+ stageFlags = stageFlags_;
return *this;
}
- RenderPassCreateInfo& setSType( StructureType sType_ )
+ operator const VkObjectTablePushConstantEntryNVX&() const
{
- sType = sType_;
- return *this;
+ return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>(this);
}
- RenderPassCreateInfo& setPNext( const void* pNext_ )
+ bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const
{
- pNext = pNext_;
- return *this;
+ return ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( pipelineLayout == rhs.pipelineLayout )
+ && ( stageFlags == rhs.stageFlags );
}
- RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ )
- {
- flags = flags_;
- return *this;
- }
+ bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ PipelineLayout pipelineLayout;
+ ShaderStageFlags stageFlags;
+ };
+ static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
+
+ // Raw version: forwards to vkEnumerateInstanceLayerProperties (two-call idiom:
+ // pass pProperties == nullptr to query only the count into *pPropertyCount).
+ VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties )
+ {
+ return static_cast<Result>( vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ // Enhanced version: returns all instance layers as a vector, looping while the
+ // implementation reports eIncomplete (the layer set may change between the count
+ // query and the fill call).
+ template <typename Allocator = std::allocator<LayerProperties>>
+ typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties()
+ {
+ std::vector<LayerProperties,Allocator> properties;
+ // Initialized to 0: if the very first call fails with an error result, the
+ // loop exits without the implementation having written propertyCount, and the
+ // assert/resize below must not read an indeterminate value.
+ uint32_t propertyCount = 0;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ assert( propertyCount <= properties.size() );
+ properties.resize( propertyCount ); // shrink in case fewer entries were written
+ return createResultValue( result, properties, "vk::enumerateInstanceLayerProperties" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ // Raw version: forwards to vkEnumerateInstanceExtensionProperties. pLayerName may
+ // be nullptr (instance-level extensions) or a layer name; two-call idiom as above.
+ VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties )
+ {
+ return static_cast<Result>( vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ // Enhanced version: returns all instance extensions (optionally restricted to one
+ // layer) as a vector, looping while the implementation reports eIncomplete.
+ template <typename Allocator = std::allocator<ExtensionProperties>>
+ typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr )
+ {
+ std::vector<ExtensionProperties,Allocator> properties;
+ // Initialized to 0: if the very first call fails with an error result, the
+ // loop exits without the implementation having written propertyCount, and the
+ // assert/resize below must not read an indeterminate value.
+ uint32_t propertyCount = 0;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ assert( propertyCount <= properties.size() );
+ properties.resize( propertyCount ); // shrink in case fewer entries were written
+ return createResultValue( result, properties, "vk::enumerateInstanceExtensionProperties" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ // forward declarations
+ struct CmdProcessCommandsInfoNVX;
- RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
- {
- attachmentCount = attachmentCount_;
- return *this;
- }
+ class CommandBuffer
+ {
+ public:
+ CommandBuffer()
+ : m_commandBuffer(VK_NULL_HANDLE)
+ {}
- RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ )
- {
- pAttachments = pAttachments_;
- return *this;
- }
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ CommandBuffer(VkCommandBuffer commandBuffer)
+ : m_commandBuffer(commandBuffer)
+ {}
- RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ )
+ CommandBuffer& operator=(VkCommandBuffer commandBuffer)
{
- subpassCount = subpassCount_;
+ m_commandBuffer = commandBuffer;
return *this;
}
+#endif
- RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ )
+ bool operator==(CommandBuffer const &rhs) const
{
- pSubpasses = pSubpasses_;
- return *this;
+ return m_commandBuffer == rhs.m_commandBuffer;
}
- RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
+ bool operator!=(CommandBuffer const &rhs) const
{
- dependencyCount = dependencyCount_;
- return *this;
+ return m_commandBuffer != rhs.m_commandBuffer;
}
- RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ )
+ bool operator<(CommandBuffer const &rhs) const
{
- pDependencies = pDependencies_;
- return *this;
+ return m_commandBuffer < rhs.m_commandBuffer;
}
- operator const VkRenderPassCreateInfo&() const
+ Result begin( const CommandBufferBeginInfo* pBeginInfo ) const
{
- return *reinterpret_cast<const VkRenderPassCreateInfo*>(this);
+ return static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
}
- bool operator==( RenderPassCreateInfo const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo ) const
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( attachmentCount == rhs.attachmentCount )
- && ( pAttachments == rhs.pAttachments )
- && ( subpassCount == rhs.subpassCount )
- && ( pSubpasses == rhs.pSubpasses )
- && ( dependencyCount == rhs.dependencyCount )
- && ( pDependencies == rhs.pDependencies );
+ Result result = static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
+ return createResultValue( result, "vk::CommandBuffer::begin" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!=( RenderPassCreateInfo const& rhs ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ Result end( ) const
{
- return !operator==( rhs );
+ return static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- private:
- StructureType sType;
-
- public:
- const void* pNext;
- RenderPassCreateFlags flags;
- uint32_t attachmentCount;
- const AttachmentDescription* pAttachments;
- uint32_t subpassCount;
- const SubpassDescription* pSubpasses;
- uint32_t dependencyCount;
- const SubpassDependency* pDependencies;
- };
- static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
-
- struct SubmitInfo
- {
- SubmitInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, const PipelineStageFlags* pWaitDstStageMask_ = nullptr, uint32_t commandBufferCount_ = 0, const CommandBuffer* pCommandBuffers_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
- : sType( StructureType::eSubmitInfo )
- , pNext( nullptr )
- , waitSemaphoreCount( waitSemaphoreCount_ )
- , pWaitSemaphores( pWaitSemaphores_ )
- , pWaitDstStageMask( pWaitDstStageMask_ )
- , commandBufferCount( commandBufferCount_ )
- , pCommandBuffers( pCommandBuffers_ )
- , signalSemaphoreCount( signalSemaphoreCount_ )
- , pSignalSemaphores( pSignalSemaphores_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ ResultValueType<void>::type end() const
{
+ Result result = static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
+ return createResultValue( result, "vk::CommandBuffer::end" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SubmitInfo( VkSubmitInfo const & rhs )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ Result reset( CommandBufferResetFlags flags ) const
{
- memcpy( this, &rhs, sizeof(SubmitInfo) );
+ return static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SubmitInfo& operator=( VkSubmitInfo const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ ResultValueType<void>::type reset( CommandBufferResetFlags flags ) const
{
- memcpy( this, &rhs, sizeof(SubmitInfo) );
- return *this;
+ Result result = static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+ return createResultValue( result, "vk::CommandBuffer::reset" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SubmitInfo& setSType( StructureType sType_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
{
- sType = sType_;
- return *this;
+ vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SubmitInfo& setPNext( const void* pNext_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
{
- pNext = pNext_;
- return *this;
+ vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+ void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports ) const
{
- waitSemaphoreCount = waitSemaphoreCount_;
- return *this;
+ vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
}
- SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports ) const
{
- pWaitSemaphores = pWaitSemaphores_;
- return *this;
+ vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
+ void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors ) const
{
- pWaitDstStageMask = pWaitDstStageMask_;
- return *this;
+ vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
}
- SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors ) const
{
- commandBufferCount = commandBufferCount_;
- return *this;
+ vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setLineWidth( float lineWidth ) const
{
- pCommandBuffers = pCommandBuffers_;
- return *this;
+ vkCmdSetLineWidth( m_commandBuffer, lineWidth );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setLineWidth( float lineWidth ) const
{
- signalSemaphoreCount = signalSemaphoreCount_;
- return *this;
+ vkCmdSetLineWidth( m_commandBuffer, lineWidth );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
{
- pSignalSemaphores = pSignalSemaphores_;
- return *this;
+ vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- operator const VkSubmitInfo&() const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
{
- return *reinterpret_cast<const VkSubmitInfo*>(this);
+ vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator==( SubmitInfo const& rhs ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setBlendConstants( const float blendConstants[4] ) const
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
- && ( pWaitSemaphores == rhs.pWaitSemaphores )
- && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
- && ( commandBufferCount == rhs.commandBufferCount )
- && ( pCommandBuffers == rhs.pCommandBuffers )
- && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
- && ( pSignalSemaphores == rhs.pSignalSemaphores );
+ vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!=( SubmitInfo const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setBlendConstants( const float blendConstants[4] ) const
{
- return !operator==( rhs );
+ vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- private:
- StructureType sType;
-
- public:
- const void* pNext;
- uint32_t waitSemaphoreCount;
- const Semaphore* pWaitSemaphores;
- const PipelineStageFlags* pWaitDstStageMask;
- uint32_t commandBufferCount;
- const CommandBuffer* pCommandBuffers;
- uint32_t signalSemaphoreCount;
- const Semaphore* pSignalSemaphores;
- };
- static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
-
- class Queue
- {
- public:
- Queue()
- : m_queue(VK_NULL_HANDLE)
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Queue(VkQueue queue)
- : m_queue(queue)
- {}
-
- Queue& operator=(VkQueue queue)
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
{
- m_queue = queue;
- return *this;
+ vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
}
-#endif
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator==(Queue const &rhs) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
{
- return m_queue == rhs.m_queue;
+ vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!=(Queue const &rhs) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
{
- return m_queue != rhs.m_queue;
+ vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator<(Queue const &rhs) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
{
- return m_queue < rhs.m_queue;
+ vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
{
- return static_cast<Result>( vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- ResultValueType<void>::type submit( ArrayProxy<const SubmitInfo> submits, Fence fence ) const
+ void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
{
- Result result = static_cast<Result>( vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
- return createResultValue( result, "vk::Queue::submit" );
+ vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- Result waitIdle( ) const
+ void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
{
- return static_cast<Result>( vkQueueWaitIdle( m_queue ) );
+ vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
}
#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- ResultValueType<void>::type waitIdle() const
+ void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
{
- Result result = static_cast<Result>( vkQueueWaitIdle( m_queue ) );
- return createResultValue( result, "vk::Queue::waitIdle" );
+ vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence ) const
+ void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const
{
- return static_cast<Result>( vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+ vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- ResultValueType<void>::type bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence ) const
+ void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets ) const
{
- Result result = static_cast<Result>( vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
- return createResultValue( result, "vk::Queue::bindSparse" );
+ vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- Result presentKHR( const PresentInfoKHR* pPresentInfo ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
{
- return static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
+ vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- Result presentKHR( const PresentInfoKHR & presentInfo ) const
+ void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
{
- Result result = static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
- return createResultValue( result, "vk::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
+ vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- explicit
-#endif
- operator VkQueue() const
+ void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets ) const
{
- return m_queue;
+ vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets );
}
- explicit operator bool() const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets ) const
{
- return m_queue != VK_NULL_HANDLE;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ assert( buffers.size() == offsets.size() );
+#else
+ if ( buffers.size() != offsets.size() )
+ {
+ throw std::logic_error( "vk::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+ }
+#endif // VULKAN_HPP_NO_EXCEPTIONS
+ vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data() );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!() const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
{
- return m_queue == VK_NULL_HANDLE;
+ vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- private:
- VkQueue m_queue;
- };
- static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
-
- enum class PresentModeKHR
- {
- eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
- eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
- eFifo = VK_PRESENT_MODE_FIFO_KHR,
- eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR
- };
-
- enum class ColorSpaceKHR
- {
- eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR
- };
-
- struct SurfaceFormatKHR
- {
- operator const VkSurfaceFormatKHR&() const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
{
- return *reinterpret_cast<const VkSurfaceFormatKHR*>(this);
+ vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator==( SurfaceFormatKHR const& rhs ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
{
- return ( format == rhs.format )
- && ( colorSpace == rhs.colorSpace );
+ vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!=( SurfaceFormatKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
{
- return !operator==( rhs );
+ vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- Format format;
- ColorSpaceKHR colorSpace;
- };
- static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
-
- enum class DisplayPlaneAlphaFlagBitsKHR
- {
- eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
- eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
- ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
- ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
- };
-
- using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
-
- inline DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 )
- {
- return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
- }
-
- struct DisplayPlaneCapabilitiesKHR
- {
- operator const VkDisplayPlaneCapabilitiesKHR&() const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
{
- return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>(this);
+ vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
{
- return ( supportedAlpha == rhs.supportedAlpha )
- && ( minSrcPosition == rhs.minSrcPosition )
- && ( maxSrcPosition == rhs.maxSrcPosition )
- && ( minSrcExtent == rhs.minSrcExtent )
- && ( maxSrcExtent == rhs.maxSrcExtent )
- && ( minDstPosition == rhs.minDstPosition )
- && ( maxDstPosition == rhs.maxDstPosition )
- && ( minDstExtent == rhs.minDstExtent )
- && ( maxDstExtent == rhs.maxDstExtent );
+ vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
{
- return !operator==( rhs );
+ vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DisplayPlaneAlphaFlagsKHR supportedAlpha;
- Offset2D minSrcPosition;
- Offset2D maxSrcPosition;
- Extent2D minSrcExtent;
- Extent2D maxSrcExtent;
- Offset2D minDstPosition;
- Offset2D maxDstPosition;
- Extent2D minDstExtent;
- Extent2D maxDstExtent;
- };
- static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
-
- enum class CompositeAlphaFlagBitsKHR
- {
- eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
- ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
- ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
- eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
- };
-
- using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
-
- inline CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 )
- {
- return CompositeAlphaFlagsKHR( bit0 ) | bit1;
- }
-
- enum class SurfaceTransformFlagBitsKHR
- {
- eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
- eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
- eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
- eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
- eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
- eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
- eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
- eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
- eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
- };
-
- using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
-
- inline SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 )
- {
- return SurfaceTransformFlagsKHR( bit0 ) | bit1;
- }
-
- struct DisplayPropertiesKHR
- {
- operator const VkDisplayPropertiesKHR&() const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
{
- return *reinterpret_cast<const VkDisplayPropertiesKHR*>(this);
+ vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator==( DisplayPropertiesKHR const& rhs ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
{
- return ( display == rhs.display )
- && ( displayName == rhs.displayName )
- && ( physicalDimensions == rhs.physicalDimensions )
- && ( physicalResolution == rhs.physicalResolution )
- && ( supportedTransforms == rhs.supportedTransforms )
- && ( planeReorderPossible == rhs.planeReorderPossible )
- && ( persistentContent == rhs.persistentContent );
+ vkCmdDispatch( m_commandBuffer, x, y, z );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!=( DisplayPropertiesKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
{
- return !operator==( rhs );
+ vkCmdDispatch( m_commandBuffer, x, y, z );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DisplayKHR display;
- const char* displayName;
- Extent2D physicalDimensions;
- Extent2D physicalResolution;
- SurfaceTransformFlagsKHR supportedTransforms;
- Bool32 planeReorderPossible;
- Bool32 persistentContent;
- };
- static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
+ {
+ vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
+ }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- struct DisplaySurfaceCreateInfoKHR
- {
- DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), DisplayModeKHR displayMode_ = DisplayModeKHR(), uint32_t planeIndex_ = 0, uint32_t planeStackIndex_ = 0, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = 0, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = Extent2D() )
- : sType( StructureType::eDisplaySurfaceCreateInfoKHR )
- , pNext( nullptr )
- , flags( flags_ )
- , displayMode( displayMode_ )
- , planeIndex( planeIndex_ )
- , planeStackIndex( planeStackIndex_ )
- , transform( transform_ )
- , globalAlpha( globalAlpha_ )
- , alphaMode( alphaMode_ )
- , imageExtent( imageExtent_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
{
+ vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs )
+ void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions ) const
{
- memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
+ vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
}
- DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions ) const
{
- memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
- return *this;
+ vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DisplaySurfaceCreateInfoKHR& setSType( StructureType sType_ )
+ void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions ) const
{
- sType = sType_;
- return *this;
+ vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
}
- DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions ) const
{
- pNext = pNext_;
- return *this;
+ vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ )
+ void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter ) const
{
- flags = flags_;
- return *this;
+ vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
}
- DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter ) const
{
- displayMode = displayMode_;
- return *this;
+ vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ )
+ void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions ) const
{
- planeIndex = planeIndex_;
- return *this;
+ vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
}
- DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions ) const
{
- planeStackIndex = planeStackIndex_;
- return *this;
+ vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ )
+ void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions ) const
{
- transform = transform_;
- return *this;
+ vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
}
- DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions ) const
{
- globalAlpha = globalAlpha_;
- return *this;
+ vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ )
+ void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData ) const
{
- alphaMode = alphaMode_;
- return *this;
+ vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, dataSize, pData );
}
- DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T>
+ void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data ) const
{
- imageExtent = imageExtent_;
- return *this;
+ vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- operator const VkDisplaySurfaceCreateInfoKHR&() const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
{
- return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>(this);
+ vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( displayMode == rhs.displayMode )
- && ( planeIndex == rhs.planeIndex )
- && ( planeStackIndex == rhs.planeStackIndex )
- && ( transform == rhs.transform )
- && ( globalAlpha == rhs.globalAlpha )
- && ( alphaMode == rhs.alphaMode )
- && ( imageExtent == rhs.imageExtent );
+ vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const
+ void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
{
- return !operator==( rhs );
+ vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
}
- private:
- StructureType sType;
-
- public:
- const void* pNext;
- DisplaySurfaceCreateFlagsKHR flags;
- DisplayModeKHR displayMode;
- uint32_t planeIndex;
- uint32_t planeStackIndex;
- SurfaceTransformFlagBitsKHR transform;
- float globalAlpha;
- DisplayPlaneAlphaFlagBitsKHR alphaMode;
- Extent2D imageExtent;
- };
- static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges ) const
+ {
+ vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- struct SurfaceCapabilitiesKHR
- {
- operator const VkSurfaceCapabilitiesKHR&() const
+ void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
{
- return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>(this);
+ vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
}
- bool operator==( SurfaceCapabilitiesKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges ) const
{
- return ( minImageCount == rhs.minImageCount )
- && ( maxImageCount == rhs.maxImageCount )
- && ( currentExtent == rhs.currentExtent )
- && ( minImageExtent == rhs.minImageExtent )
- && ( maxImageExtent == rhs.maxImageExtent )
- && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
- && ( supportedTransforms == rhs.supportedTransforms )
- && ( currentTransform == rhs.currentTransform )
- && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
- && ( supportedUsageFlags == rhs.supportedUsageFlags );
+ vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const
+ void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects ) const
{
- return !operator==( rhs );
+ vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
}
- uint32_t minImageCount;
- uint32_t maxImageCount;
- Extent2D currentExtent;
- Extent2D minImageExtent;
- Extent2D maxImageExtent;
- uint32_t maxImageArrayLayers;
- SurfaceTransformFlagsKHR supportedTransforms;
- SurfaceTransformFlagBitsKHR currentTransform;
- CompositeAlphaFlagsKHR supportedCompositeAlpha;
- ImageUsageFlags supportedUsageFlags;
- };
- static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects ) const
+ {
+ vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- struct SwapchainCreateInfoKHR
- {
- SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), SurfaceKHR surface_ = SurfaceKHR(), uint32_t minImageCount_ = 0, Format imageFormat_ = Format::eUndefined, ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, Extent2D imageExtent_ = Extent2D(), uint32_t imageArrayLayers_ = 0, ImageUsageFlags imageUsage_ = ImageUsageFlags(), SharingMode imageSharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, Bool32 clipped_ = 0, SwapchainKHR oldSwapchain_ = SwapchainKHR() )
- : sType( StructureType::eSwapchainCreateInfoKHR )
- , pNext( nullptr )
- , flags( flags_ )
- , surface( surface_ )
- , minImageCount( minImageCount_ )
- , imageFormat( imageFormat_ )
- , imageColorSpace( imageColorSpace_ )
- , imageExtent( imageExtent_ )
- , imageArrayLayers( imageArrayLayers_ )
- , imageUsage( imageUsage_ )
- , imageSharingMode( imageSharingMode_ )
- , queueFamilyIndexCount( queueFamilyIndexCount_ )
- , pQueueFamilyIndices( pQueueFamilyIndices_ )
- , preTransform( preTransform_ )
- , compositeAlpha( compositeAlpha_ )
- , presentMode( presentMode_ )
- , clipped( clipped_ )
- , oldSwapchain( oldSwapchain_ )
+ void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions ) const
{
+ vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
}
- SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions ) const
{
- memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
+ vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setEvent( Event event, PipelineStageFlags stageMask ) const
{
- memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
- return *this;
+ vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setSType( StructureType sType_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setEvent( Event event, PipelineStageFlags stageMask ) const
{
- sType = sType_;
- return *this;
+ vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setPNext( const void* pNext_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void resetEvent( Event event, PipelineStageFlags stageMask ) const
{
- pNext = pNext_;
- return *this;
+ vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void resetEvent( Event event, PipelineStageFlags stageMask ) const
{
- flags = flags_;
- return *this;
+ vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ )
+ void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
{
- surface = surface_;
- return *this;
+ vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
}
- SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
{
- minImageCount = minImageCount_;
- return *this;
+ vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ )
+ void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
{
- imageFormat = imageFormat_;
- return *this;
+ vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
}
- SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
{
- imageColorSpace = imageColorSpace_;
- return *this;
+ vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
{
- imageExtent = imageExtent_;
- return *this;
+ vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
{
- imageArrayLayers = imageArrayLayers_;
- return *this;
+ vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void endQuery( QueryPool queryPool, uint32_t query ) const
{
- imageUsage = imageUsage_;
- return *this;
+ vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void endQuery( QueryPool queryPool, uint32_t query ) const
{
- imageSharingMode = imageSharingMode_;
- return *this;
+ vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
{
- queueFamilyIndexCount = queueFamilyIndexCount_;
- return *this;
+ vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
{
- pQueueFamilyIndices = pQueueFamilyIndices_;
- return *this;
+ vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
{
- preTransform = preTransform_;
- return *this;
+ vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
{
- compositeAlpha = compositeAlpha_;
- return *this;
+ vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
{
- presentMode = presentMode_;
- return *this;
+ vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
{
- clipped = clipped_;
- return *this;
+ vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ )
+ void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const
{
- oldSwapchain = oldSwapchain_;
- return *this;
+ vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
}
- operator const VkSwapchainCreateInfoKHR&() const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T>
+ void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values ) const
{
- return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>(this);
+ vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator==( SwapchainCreateInfoKHR const& rhs ) const
+ void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents ) const
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( surface == rhs.surface )
- && ( minImageCount == rhs.minImageCount )
- && ( imageFormat == rhs.imageFormat )
- && ( imageColorSpace == rhs.imageColorSpace )
- && ( imageExtent == rhs.imageExtent )
- && ( imageArrayLayers == rhs.imageArrayLayers )
- && ( imageUsage == rhs.imageUsage )
- && ( imageSharingMode == rhs.imageSharingMode )
- && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
- && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
- && ( preTransform == rhs.preTransform )
- && ( compositeAlpha == rhs.compositeAlpha )
- && ( presentMode == rhs.presentMode )
- && ( clipped == rhs.clipped )
- && ( oldSwapchain == rhs.oldSwapchain );
+ vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
}
- bool operator!=( SwapchainCreateInfoKHR const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents ) const
{
- return !operator==( rhs );
+ vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- private:
- StructureType sType;
-
- public:
- const void* pNext;
- SwapchainCreateFlagsKHR flags;
- SurfaceKHR surface;
- uint32_t minImageCount;
- Format imageFormat;
- ColorSpaceKHR imageColorSpace;
- Extent2D imageExtent;
- uint32_t imageArrayLayers;
- ImageUsageFlags imageUsage;
- SharingMode imageSharingMode;
- uint32_t queueFamilyIndexCount;
- const uint32_t* pQueueFamilyIndices;
- SurfaceTransformFlagBitsKHR preTransform;
- CompositeAlphaFlagBitsKHR compositeAlpha;
- PresentModeKHR presentMode;
- Bool32 clipped;
- SwapchainKHR oldSwapchain;
- };
- static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
-
- enum class DebugReportFlagBitsEXT
- {
- eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
- eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
- ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
- eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
- };
-
- using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
-
- inline DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 )
- {
- return DebugReportFlagsEXT( bit0 ) | bit1;
- }
-
- struct DebugReportCallbackCreateInfoEXT
- {
- DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, void* pUserData_ = nullptr )
- : sType( StructureType::eDebugReportCallbackCreateInfoEXT )
- , pNext( nullptr )
- , flags( flags_ )
- , pfnCallback( pfnCallback_ )
- , pUserData( pUserData_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void nextSubpass( SubpassContents contents ) const
{
+ vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void nextSubpass( SubpassContents contents ) const
{
- memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
+ vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void endRenderPass( ) const
{
- memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
- return *this;
+ vkCmdEndRenderPass( m_commandBuffer );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DebugReportCallbackCreateInfoEXT& setSType( StructureType sType_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void endRenderPass() const
{
- sType = sType_;
- return *this;
+ vkCmdEndRenderPass( m_commandBuffer );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ )
+ void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
{
- pNext = pNext_;
- return *this;
+ vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
}
- DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void executeCommands( ArrayProxy<const CommandBuffer> commandBuffers ) const
{
- flags = flags_;
- return *this;
+ vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ )
+ void debugMarkerBeginEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
{
- pfnCallback = pfnCallback_;
- return *this;
+ vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
}
- DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ DebugMarkerMarkerInfoEXT debugMarkerBeginEXT() const
{
- pUserData = pUserData_;
- return *this;
+ DebugMarkerMarkerInfoEXT markerInfo;
+ vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
+ return markerInfo;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- operator const VkDebugReportCallbackCreateInfoEXT&() const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void debugMarkerEndEXT( ) const
{
- return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>(this);
+ vkCmdDebugMarkerEndEXT( m_commandBuffer );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void debugMarkerEndEXT() const
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( pfnCallback == rhs.pfnCallback )
- && ( pUserData == rhs.pUserData );
+ vkCmdDebugMarkerEndEXT( m_commandBuffer );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const
+ void debugMarkerInsertEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
{
- return !operator==( rhs );
+ vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
}
- private:
- StructureType sType;
-
- public:
- const void* pNext;
- DebugReportFlagsEXT flags;
- PFN_vkDebugReportCallbackEXT pfnCallback;
- void* pUserData;
- };
- static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
-
- enum class DebugReportObjectTypeEXT
- {
- eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
- eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
- eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
- eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
- eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
- eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
- eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
- eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
- eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
- ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
- ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
- eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
- eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
- eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
- eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
- eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
- eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
- eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
- eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT
- };
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ DebugMarkerMarkerInfoEXT debugMarkerInsertEXT() const
+ {
+ DebugMarkerMarkerInfoEXT markerInfo;
+ vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
+ return markerInfo;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- struct DebugMarkerObjectNameInfoEXT
- {
- DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, const char* pObjectName_ = nullptr )
- : sType( StructureType::eDebugMarkerObjectNameInfoEXT )
- , pNext( nullptr )
- , objectType( objectType_ )
- , object( object_ )
- , pObjectName( pObjectName_ )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
{
+ vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
{
- memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
+ vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
{
- memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
- return *this;
+ vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
}
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DebugMarkerObjectNameInfoEXT& setSType( StructureType sType_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
{
- sType = sType_;
- return *this;
+ vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ )
+ void processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const
{
- pNext = pNext_;
- return *this;
+ vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
}
- DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo ) const
{
- objectType = objectType_;
- return *this;
+ vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ )
+ void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const
{
- object = object_;
- return *this;
+ vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
}
- DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo ) const
{
- pObjectName = pObjectName_;
- return *this;
+ vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- operator const VkDebugMarkerObjectNameInfoEXT&() const
+#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ explicit
+#endif
+ operator VkCommandBuffer() const
{
- return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>(this);
+ return m_commandBuffer;
}
- bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
+ explicit operator bool() const
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( objectType == rhs.objectType )
- && ( object == rhs.object )
- && ( pObjectName == rhs.pObjectName );
+ return m_commandBuffer != VK_NULL_HANDLE;
}
- bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
+ bool operator!() const
{
- return !operator==( rhs );
+ return m_commandBuffer == VK_NULL_HANDLE;
}
private:
- StructureType sType;
-
- public:
- const void* pNext;
- DebugReportObjectTypeEXT objectType;
- uint64_t object;
- const char* pObjectName;
+ VkCommandBuffer m_commandBuffer;
};
- static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
- struct DebugMarkerObjectTagInfoEXT
+ struct SubmitInfo
{
- DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr )
- : sType( StructureType::eDebugMarkerObjectTagInfoEXT )
+ SubmitInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, const PipelineStageFlags* pWaitDstStageMask_ = nullptr, uint32_t commandBufferCount_ = 0, const CommandBuffer* pCommandBuffers_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
+ : sType( StructureType::eSubmitInfo )
, pNext( nullptr )
- , objectType( objectType_ )
- , object( object_ )
- , tagName( tagName_ )
- , tagSize( tagSize_ )
- , pTag( pTag_ )
+ , waitSemaphoreCount( waitSemaphoreCount_ )
+ , pWaitSemaphores( pWaitSemaphores_ )
+ , pWaitDstStageMask( pWaitDstStageMask_ )
+ , commandBufferCount( commandBufferCount_ )
+ , pCommandBuffers( pCommandBuffers_ )
+ , signalSemaphoreCount( signalSemaphoreCount_ )
+ , pSignalSemaphores( pSignalSemaphores_ )
{
}
- DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs )
+ SubmitInfo( VkSubmitInfo const & rhs )
{
- memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
+ memcpy( this, &rhs, sizeof(SubmitInfo) );
}
- DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs )
+ SubmitInfo& operator=( VkSubmitInfo const & rhs )
{
- memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
+ memcpy( this, &rhs, sizeof(SubmitInfo) );
return *this;
}
- DebugMarkerObjectTagInfoEXT& setSType( StructureType sType_ )
+ SubmitInfo& setSType( StructureType sType_ )
{
sType = sType_;
return *this;
}
- DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ )
+ SubmitInfo& setPNext( const void* pNext_ )
{
pNext = pNext_;
return *this;
}
- DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+ SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
{
- objectType = objectType_;
+ waitSemaphoreCount = waitSemaphoreCount_;
return *this;
}
- DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ )
+ SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
{
- object = object_;
+ pWaitSemaphores = pWaitSemaphores_;
return *this;
}
- DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ )
+ SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
{
- tagName = tagName_;
+ pWaitDstStageMask = pWaitDstStageMask_;
return *this;
}
- DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ )
+ SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
{
- tagSize = tagSize_;
+ commandBufferCount = commandBufferCount_;
return *this;
}
- DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ )
+ SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
{
- pTag = pTag_;
+ pCommandBuffers = pCommandBuffers_;
return *this;
}
- operator const VkDebugMarkerObjectTagInfoEXT&() const
+ SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
{
- return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>(this);
+ signalSemaphoreCount = signalSemaphoreCount_;
+ return *this;
}
- bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const
+ SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
+ {
+ pSignalSemaphores = pSignalSemaphores_;
+ return *this;
+ }
+
+ operator const VkSubmitInfo&() const
+ {
+ return *reinterpret_cast<const VkSubmitInfo*>(this);
+ }
+
+ bool operator==( SubmitInfo const& rhs ) const
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( objectType == rhs.objectType )
- && ( object == rhs.object )
- && ( tagName == rhs.tagName )
- && ( tagSize == rhs.tagSize )
- && ( pTag == rhs.pTag );
+ && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+ && ( pWaitSemaphores == rhs.pWaitSemaphores )
+ && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
+ && ( commandBufferCount == rhs.commandBufferCount )
+ && ( pCommandBuffers == rhs.pCommandBuffers )
+ && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+ && ( pSignalSemaphores == rhs.pSignalSemaphores );
}
- bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const
+ bool operator!=( SubmitInfo const& rhs ) const
{
return !operator==( rhs );
}
public:
const void* pNext;
- DebugReportObjectTypeEXT objectType;
- uint64_t object;
- uint64_t tagName;
- size_t tagSize;
- const void* pTag;
+ uint32_t waitSemaphoreCount;
+ const Semaphore* pWaitSemaphores;
+ const PipelineStageFlags* pWaitDstStageMask;
+ uint32_t commandBufferCount;
+ const CommandBuffer* pCommandBuffers;
+ uint32_t signalSemaphoreCount;
+ const Semaphore* pSignalSemaphores;
};
- static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
- enum class DebugReportErrorEXT
+ class Queue
{
- eNone = VK_DEBUG_REPORT_ERROR_NONE_EXT,
- eCallbackRef = VK_DEBUG_REPORT_ERROR_CALLBACK_REF_EXT
- };
+ public:
+ Queue()
+ : m_queue(VK_NULL_HANDLE)
+ {}
- enum class RasterizationOrderAMD
- {
- eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
- eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
- };
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Queue(VkQueue queue)
+ : m_queue(queue)
+ {}
- struct PipelineRasterizationStateRasterizationOrderAMD
- {
- PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
- : sType( StructureType::ePipelineRasterizationStateRasterizationOrderAMD )
- , pNext( nullptr )
- , rasterizationOrder( rasterizationOrder_ )
+ Queue& operator=(VkQueue queue)
+ {
+ m_queue = queue;
+ return *this;
+ }
+#endif
+
+ bool operator==(Queue const &rhs) const
{
+ return m_queue == rhs.m_queue;
}
- PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+ bool operator!=(Queue const &rhs) const
{
- memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
+ return m_queue != rhs.m_queue;
}
- PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+ bool operator<(Queue const &rhs) const
{
- memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
- return *this;
+ return m_queue < rhs.m_queue;
}
- PipelineRasterizationStateRasterizationOrderAMD& setSType( StructureType sType_ )
+ Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence ) const
+ {
+ return static_cast<Result>( vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ ResultValueType<void>::type submit( ArrayProxy<const SubmitInfo> submits, Fence fence ) const
+ {
+ Result result = static_cast<Result>( vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
+ return createResultValue( result, "vk::Queue::submit" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ Result waitIdle( ) const
+ {
+ return static_cast<Result>( vkQueueWaitIdle( m_queue ) );
+ }
+#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ ResultValueType<void>::type waitIdle() const
+ {
+ Result result = static_cast<Result>( vkQueueWaitIdle( m_queue ) );
+ return createResultValue( result, "vk::Queue::waitIdle" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence ) const
+ {
+ return static_cast<Result>( vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ ResultValueType<void>::type bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence ) const
{
- sType = sType_;
- return *this;
+ Result result = static_cast<Result>( vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
+ return createResultValue( result, "vk::Queue::bindSparse" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
+ Result presentKHR( const PresentInfoKHR* pPresentInfo ) const
{
- pNext = pNext_;
- return *this;
+ return static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
}
- PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ Result presentKHR( const PresentInfoKHR & presentInfo ) const
{
- rasterizationOrder = rasterizationOrder_;
- return *this;
+ Result result = static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
+ return createResultValue( result, "vk::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- operator const VkPipelineRasterizationStateRasterizationOrderAMD&() const
+#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ explicit
+#endif
+ operator VkQueue() const
{
- return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
+ return m_queue;
}
- bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+ explicit operator bool() const
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( rasterizationOrder == rhs.rasterizationOrder );
+ return m_queue != VK_NULL_HANDLE;
}
- bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+ bool operator!() const
{
- return !operator==( rhs );
+ return m_queue == VK_NULL_HANDLE;
}
private:
- StructureType sType;
-
- public:
- const void* pNext;
- RasterizationOrderAMD rasterizationOrder;
- };
- static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
-
- enum class ExternalMemoryHandleTypeFlagBitsNV
- {
- eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
- eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
- eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
- eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+ VkQueue m_queue;
};
-
- using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
-
- inline ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 )
- {
- return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
- }
+ static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
class Device
{
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- explicit
-#endif
- operator VkDevice() const
- {
- return m_device;
- }
-
- explicit operator bool() const
- {
- return m_device != VK_NULL_HANDLE;
- }
-
- bool operator!() const
- {
- return m_device == VK_NULL_HANDLE;
- }
-
- private:
- VkDevice m_device;
- };
- static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
-
- struct ExternalMemoryImageCreateInfoNV
- {
- ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
- : sType( StructureType::eExternalMemoryImageCreateInfoNV )
- , pNext( nullptr )
- , handleTypes( handleTypes_ )
- {
- }
-
- ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs )
- {
- memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
- }
-
- ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs )
- {
- memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
- return *this;
- }
-
- ExternalMemoryImageCreateInfoNV& setSType( StructureType sType_ )
- {
- sType = sType_;
- return *this;
- }
-
- ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ )
- {
- pNext = pNext_;
- return *this;
- }
-
- ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
- {
- handleTypes = handleTypes_;
- return *this;
- }
-
- operator const VkExternalMemoryImageCreateInfoNV&() const
- {
- return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>(this);
- }
-
- bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( handleTypes == rhs.handleTypes );
- }
-
- bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const
- {
- return !operator==( rhs );
- }
-
- private:
- StructureType sType;
-
- public:
- const void* pNext;
- ExternalMemoryHandleTypeFlagsNV handleTypes;
- };
- static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
-
- struct ExportMemoryAllocateInfoNV
- {
- ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
- : sType( StructureType::eExportMemoryAllocateInfoNV )
- , pNext( nullptr )
- , handleTypes( handleTypes_ )
- {
- }
-
- ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs )
- {
- memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
- }
-
- ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs )
- {
- memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
- return *this;
- }
-
- ExportMemoryAllocateInfoNV& setSType( StructureType sType_ )
- {
- sType = sType_;
- return *this;
- }
-
- ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ )
- {
- pNext = pNext_;
- return *this;
- }
-
- ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
- {
- handleTypes = handleTypes_;
- return *this;
- }
-
- operator const VkExportMemoryAllocateInfoNV&() const
- {
- return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>(this);
- }
-
- bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( handleTypes == rhs.handleTypes );
- }
-
- bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const
- {
- return !operator==( rhs );
- }
-
- private:
- StructureType sType;
-
- public:
- const void* pNext;
- ExternalMemoryHandleTypeFlagsNV handleTypes;
- };
- static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct ImportMemoryWin32HandleInfoNV
- {
- ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), HANDLE handle_ = 0 )
- : sType( StructureType::eImportMemoryWin32HandleInfoNV )
- , pNext( nullptr )
- , handleType( handleType_ )
- , handle( handle_ )
- {
- }
-
- ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs )
- {
- memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
- }
-
- ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs )
- {
- memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
- return *this;
- }
-
- ImportMemoryWin32HandleInfoNV& setSType( StructureType sType_ )
- {
- sType = sType_;
- return *this;
- }
-
- ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
- {
- pNext = pNext_;
- return *this;
- }
-
- ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ )
- {
- handleType = handleType_;
- return *this;
- }
-
- ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ )
- {
- handle = handle_;
- return *this;
- }
-
- operator const VkImportMemoryWin32HandleInfoNV&() const
+ Result createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const
{
- return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>(this);
+ return static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
}
- bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ ResultValueType<IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( handleType == rhs.handleType )
- && ( handle == rhs.handle );
+ IndirectCommandsLayoutNVX indirectCommandsLayout;
+ Result result = static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
+ return createResultValue( result, indirectCommandsLayout, "vk::Device::createIndirectCommandsLayoutNVX" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const
+ void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator ) const
{
- return !operator==( rhs );
+ vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
- private:
- StructureType sType;
-
- public:
- const void* pNext;
- ExternalMemoryHandleTypeFlagsNV handleType;
- HANDLE handle;
- };
- static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- enum class ExternalMemoryFeatureFlagBitsNV
- {
- eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
- eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
- eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
- };
-
- using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const
+ {
+ vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- inline ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 )
- {
- return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
- }
+ Result createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable ) const
+ {
+ return static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
+ }
- struct ExternalImageFormatPropertiesNV
- {
- ExternalImageFormatPropertiesNV( ImageFormatProperties imageFormatProperties_ = ImageFormatProperties(), ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = ExternalMemoryFeatureFlagsNV(), ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = ExternalMemoryHandleTypeFlagsNV(), ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
- : imageFormatProperties( imageFormatProperties_ )
- , externalMemoryFeatures( externalMemoryFeatures_ )
- , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
- , compatibleHandleTypes( compatibleHandleTypes_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ ResultValueType<ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
{
+ ObjectTableNVX objectTable;
+ Result result = static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
+ return createResultValue( result, objectTable, "vk::Device::createObjectTableNVX" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs )
+ void destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator ) const
{
- memcpy( this, &rhs, sizeof(ExternalImageFormatPropertiesNV) );
+ vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
- ExternalImageFormatPropertiesNV& operator=( VkExternalImageFormatPropertiesNV const & rhs )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr ) const
{
- memcpy( this, &rhs, sizeof(ExternalImageFormatPropertiesNV) );
- return *this;
+ vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- ExternalImageFormatPropertiesNV& setImageFormatProperties( ImageFormatProperties imageFormatProperties_ )
+ Result registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const
{
- imageFormatProperties = imageFormatProperties_;
- return *this;
+ return static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
}
- ExternalImageFormatPropertiesNV& setExternalMemoryFeatures( ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ ResultValueType<void>::type registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices ) const
{
- externalMemoryFeatures = externalMemoryFeatures_;
- return *this;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ assert( pObjectTableEntries.size() == objectIndices.size() );
+#else
+ if ( pObjectTableEntries.size() != objectIndices.size() )
+ {
+ throw std::logic_error( "vk::Device::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" );
+ }
+#endif // VULKAN_HPP_NO_EXCEPTIONS
+ Result result = static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), pObjectTableEntries.size() , reinterpret_cast<const VkObjectTableEntryNVX* const*>( pObjectTableEntries.data() ), objectIndices.data() ) );
+ return createResultValue( result, "vk::Device::registerObjectsNVX" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- ExternalImageFormatPropertiesNV& setExportFromImportedHandleTypes( ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ )
+ Result unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const
{
- exportFromImportedHandleTypes = exportFromImportedHandleTypes_;
- return *this;
+ return static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
}
- ExternalImageFormatPropertiesNV& setCompatibleHandleTypes( ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ ResultValueType<void>::type unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices ) const
{
- compatibleHandleTypes = compatibleHandleTypes_;
- return *this;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ assert( objectEntryTypes.size() == objectIndices.size() );
+#else
+ if ( objectEntryTypes.size() != objectIndices.size() )
+ {
+ throw std::logic_error( "vk::Device::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
+ }
+#endif // VULKAN_HPP_NO_EXCEPTIONS
+ Result result = static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
+ return createResultValue( result, "vk::Device::unregisterObjectsNVX" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- operator const VkExternalImageFormatPropertiesNV&() const
+#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ explicit
+#endif
+ operator VkDevice() const
{
- return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>(this);
+ return m_device;
}
- bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const
+ explicit operator bool() const
{
- return ( imageFormatProperties == rhs.imageFormatProperties )
- && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
- && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
- && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+ return m_device != VK_NULL_HANDLE;
}
- bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const
+ bool operator!() const
{
- return !operator==( rhs );
+ return m_device == VK_NULL_HANDLE;
}
- ImageFormatProperties imageFormatProperties;
- ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
- ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
- ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
+ private:
+ VkDevice m_device;
};
- static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+ static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
class PhysicalDevice
{
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits ) const
+ {
+ vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, DeviceGeneratedCommandsLimitsNVX & limits ) const
+ {
+ vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
explicit
#endif
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message ) const
{
+ // NOTE(review): the generator emitted an assert/throw here requiring
+ // layerPrefix.size() == message.size(), but the layer prefix and the
+ // message are independent strings with no size relationship (see
+ // vkDebugReportMessageEXT in VK_EXT_debug_report); the check rejected
+ // valid calls and has been removed.
vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
};
static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
- enum class ValidationCheckEXT
- {
- eAll = VK_VALIDATION_CHECK_ALL_EXT
- };
-
- struct ValidationFlagsEXT
+ struct CmdProcessCommandsInfoNVX
{
- ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, ValidationCheckEXT* pDisabledValidationChecks_ = nullptr )
- : sType( StructureType::eValidationFlagsEXT )
+ CmdProcessCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t indirectCommandsTokenCount_ = 0, const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, uint32_t maxSequencesCount_ = 0, CommandBuffer targetCommandBuffer_ = CommandBuffer(), Buffer sequencesCountBuffer_ = Buffer(), DeviceSize sequencesCountOffset_ = 0, Buffer sequencesIndexBuffer_ = Buffer(), DeviceSize sequencesIndexOffset_ = 0 )
+ : sType( StructureType::eCmdProcessCommandsInfoNVX )
, pNext( nullptr )
- , disabledValidationCheckCount( disabledValidationCheckCount_ )
- , pDisabledValidationChecks( pDisabledValidationChecks_ )
+ , objectTable( objectTable_ )
+ , indirectCommandsLayout( indirectCommandsLayout_ )
+ , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
+ , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
+ , maxSequencesCount( maxSequencesCount_ )
+ , targetCommandBuffer( targetCommandBuffer_ )
+ , sequencesCountBuffer( sequencesCountBuffer_ )
+ , sequencesCountOffset( sequencesCountOffset_ )
+ , sequencesIndexBuffer( sequencesIndexBuffer_ )
+ , sequencesIndexOffset( sequencesIndexOffset_ )
{
}
- ValidationFlagsEXT( VkValidationFlagsEXT const & rhs )
+ CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs )
{
- memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
+ memcpy( this, &rhs, sizeof(CmdProcessCommandsInfoNVX) );
}
- ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs )
+ CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs )
{
- memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
+ memcpy( this, &rhs, sizeof(CmdProcessCommandsInfoNVX) );
return *this;
}
- ValidationFlagsEXT& setSType( StructureType sType_ )
+ CmdProcessCommandsInfoNVX& setSType( StructureType sType_ )
{
sType = sType_;
return *this;
}
- ValidationFlagsEXT& setPNext( const void* pNext_ )
+ CmdProcessCommandsInfoNVX& setPNext( const void* pNext_ )
{
pNext = pNext_;
return *this;
}
- ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ )
+ CmdProcessCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
{
- disabledValidationCheckCount = disabledValidationCheckCount_;
+ objectTable = objectTable_;
return *this;
}
- ValidationFlagsEXT& setPDisabledValidationChecks( ValidationCheckEXT* pDisabledValidationChecks_ )
+ CmdProcessCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
{
- pDisabledValidationChecks = pDisabledValidationChecks_;
+ indirectCommandsLayout = indirectCommandsLayout_;
return *this;
}
- operator const VkValidationFlagsEXT&() const
+ CmdProcessCommandsInfoNVX& setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ )
{
- return *reinterpret_cast<const VkValidationFlagsEXT*>(this);
+ indirectCommandsTokenCount = indirectCommandsTokenCount_;
+ return *this;
}
- bool operator==( ValidationFlagsEXT const& rhs ) const
+ CmdProcessCommandsInfoNVX& setPIndirectCommandsTokens( const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ )
+ {
+ pIndirectCommandsTokens = pIndirectCommandsTokens_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
+ {
+ maxSequencesCount = maxSequencesCount_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setTargetCommandBuffer( CommandBuffer targetCommandBuffer_ )
+ {
+ targetCommandBuffer = targetCommandBuffer_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setSequencesCountBuffer( Buffer sequencesCountBuffer_ )
+ {
+ sequencesCountBuffer = sequencesCountBuffer_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setSequencesCountOffset( DeviceSize sequencesCountOffset_ )
+ {
+ sequencesCountOffset = sequencesCountOffset_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ )
+ {
+ sequencesIndexBuffer = sequencesIndexBuffer_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ )
+ {
+ sequencesIndexOffset = sequencesIndexOffset_;
+ return *this;
+ }
+
+ operator const VkCmdProcessCommandsInfoNVX&() const
+ {
+ return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>(this);
+ }
+
+ bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
- && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+ && ( objectTable == rhs.objectTable )
+ && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+ && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
+ && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
+ && ( maxSequencesCount == rhs.maxSequencesCount )
+ && ( targetCommandBuffer == rhs.targetCommandBuffer )
+ && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+ && ( sequencesCountOffset == rhs.sequencesCountOffset )
+ && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+ && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
}
- bool operator!=( ValidationFlagsEXT const& rhs ) const
+ bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const
{
return !operator==( rhs );
}
public:
const void* pNext;
- uint32_t disabledValidationCheckCount;
- ValidationCheckEXT* pDisabledValidationChecks;
+ ObjectTableNVX objectTable;
+ IndirectCommandsLayoutNVX indirectCommandsLayout;
+ uint32_t indirectCommandsTokenCount;
+ const IndirectCommandsTokenNVX* pIndirectCommandsTokens;
+ uint32_t maxSequencesCount;
+ CommandBuffer targetCommandBuffer;
+ Buffer sequencesCountBuffer;
+ DeviceSize sequencesCountOffset;
+ Buffer sequencesIndexBuffer;
+ DeviceSize sequencesIndexOffset;
};
- static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+ static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
- inline Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance )
+ VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance )
{
return static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- inline ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr )
+ VULKAN_HPP_INLINE ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr )
{
Instance instance;
Result result = static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkInstance*>( &instance ) ) );
return createResultValue( result, instance, "vk::createInstance" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- inline Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties )
- {
- return static_cast<Result>( vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Allocator = std::allocator<LayerProperties>>
- typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties()
- {
- std::vector<LayerProperties,Allocator> properties;
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
- }
- } while ( result == Result::eIncomplete );
- assert( propertyCount <= properties.size() );
- properties.resize( propertyCount );
- return createResultValue( result, properties, "vk::enumerateInstanceLayerProperties" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- inline Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties )
- {
- return static_cast<Result>( vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Allocator = std::allocator<ExtensionProperties>>
- typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr )
- {
- std::vector<ExtensionProperties,Allocator> properties;
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
- }
- } while ( result == Result::eIncomplete );
- assert( propertyCount <= properties.size() );
- properties.resize( propertyCount );
- return createResultValue( result, properties, "vk::enumerateInstanceExtensionProperties" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- inline std::string to_string(FramebufferCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(FramebufferCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlags)
{
return "{}";
}
- inline std::string to_string(QueryPoolCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(QueryPoolCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlags)
{
return "{}";
}
- inline std::string to_string(RenderPassCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(RenderPassCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlags)
{
return "{}";
}
- inline std::string to_string(SamplerCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(SamplerCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineLayoutCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineLayoutCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineCacheCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineCacheCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineDepthStencilStateCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineDepthStencilStateCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineDynamicStateCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineDynamicStateCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineColorBlendStateCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineColorBlendStateCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineMultisampleStateCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineMultisampleStateCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineRasterizationStateCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineRasterizationStateCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineViewportStateCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineViewportStateCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineTessellationStateCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineTessellationStateCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineInputAssemblyStateCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineInputAssemblyStateCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineVertexInputStateCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineVertexInputStateCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlags)
{
return "{}";
}
- inline std::string to_string(PipelineShaderStageCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(PipelineShaderStageCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlags)
{
return "{}";
}
- inline std::string to_string(DescriptorSetLayoutCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(DescriptorSetLayoutCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlags)
{
return "{}";
}
- inline std::string to_string(BufferViewCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(BufferViewCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlags)
{
return "{}";
}
- inline std::string to_string(InstanceCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(InstanceCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlags)
{
return "{}";
}
- inline std::string to_string(DeviceCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(DeviceCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlags)
{
return "{}";
}
- inline std::string to_string(DeviceQueueCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(DeviceQueueCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlags)
{
return "{}";
}
- inline std::string to_string(ImageViewCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(ImageViewCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlags)
{
return "{}";
}
- inline std::string to_string(SemaphoreCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(SemaphoreCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlags)
{
return "{}";
}
- inline std::string to_string(ShaderModuleCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(ShaderModuleCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlags)
{
return "{}";
}
- inline std::string to_string(EventCreateFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(EventCreateFlagBits)
{
return "(void)";
}
- inline std::string to_string(EventCreateFlags)
+ VULKAN_HPP_INLINE std::string to_string(EventCreateFlags)
{
return "{}";
}
- inline std::string to_string(MemoryMapFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(MemoryMapFlagBits)
{
return "(void)";
}
- inline std::string to_string(MemoryMapFlags)
+ VULKAN_HPP_INLINE std::string to_string(MemoryMapFlags)
{
return "{}";
}
- inline std::string to_string(SubpassDescriptionFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlagBits)
{
return "(void)";
}
- inline std::string to_string(SubpassDescriptionFlags)
+ VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlags)
{
return "{}";
}
- inline std::string to_string(DescriptorPoolResetFlagBits)
+ VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlagBits)
{
return "(void)";
}
- inline std::string to_string(DescriptorPoolResetFlags)
+ VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlags)
{
return "{}";
}
- inline std::string to_string(SwapchainCreateFlagBitsKHR)
+ VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagBitsKHR)
{
return "(void)";
}
- inline std::string to_string(SwapchainCreateFlagsKHR)
+ VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagsKHR)
{
return "{}";
}
- inline std::string to_string(DisplayModeCreateFlagBitsKHR)
+ VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagBitsKHR)
{
return "(void)";
}
- inline std::string to_string(DisplayModeCreateFlagsKHR)
+ VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagsKHR)
{
return "{}";
}
- inline std::string to_string(DisplaySurfaceCreateFlagBitsKHR)
+ VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagBitsKHR)
{
return "(void)";
}
- inline std::string to_string(DisplaySurfaceCreateFlagsKHR)
+ VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagsKHR)
{
return "{}";
}
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- inline std::string to_string(AndroidSurfaceCreateFlagBitsKHR)
+ VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagBitsKHR)
{
return "(void)";
}
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- inline std::string to_string(AndroidSurfaceCreateFlagsKHR)
+ VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagsKHR)
{
return "{}";
}
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_MIR_KHR
- inline std::string to_string(MirSurfaceCreateFlagBitsKHR)
+ VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagBitsKHR)
{
return "(void)";
}
#endif /*VK_USE_PLATFORM_MIR_KHR*/
#ifdef VK_USE_PLATFORM_MIR_KHR
- inline std::string to_string(MirSurfaceCreateFlagsKHR)
+ VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagsKHR)
{
return "{}";
}
#endif /*VK_USE_PLATFORM_MIR_KHR*/
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- inline std::string to_string(WaylandSurfaceCreateFlagBitsKHR)
+ VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagBitsKHR)
{
return "(void)";
}
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- inline std::string to_string(WaylandSurfaceCreateFlagsKHR)
+ VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagsKHR)
{
return "{}";
}
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
- inline std::string to_string(Win32SurfaceCreateFlagBitsKHR)
+ VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagBitsKHR)
{
return "(void)";
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
- inline std::string to_string(Win32SurfaceCreateFlagsKHR)
+ VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagsKHR)
{
return "{}";
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_XLIB_KHR
- inline std::string to_string(XlibSurfaceCreateFlagBitsKHR)
+ VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagBitsKHR)
{
return "(void)";
}
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#ifdef VK_USE_PLATFORM_XLIB_KHR
- inline std::string to_string(XlibSurfaceCreateFlagsKHR)
+ VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagsKHR)
{
return "{}";
}
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#ifdef VK_USE_PLATFORM_XCB_KHR
- inline std::string to_string(XcbSurfaceCreateFlagBitsKHR)
+ VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagBitsKHR)
{
return "(void)";
}
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#ifdef VK_USE_PLATFORM_XCB_KHR
- inline std::string to_string(XcbSurfaceCreateFlagsKHR)
+ VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagsKHR)
{
return "{}";
}
#endif /*VK_USE_PLATFORM_XCB_KHR*/
- inline std::string to_string(ImageLayout value)
+ VULKAN_HPP_INLINE std::string to_string(ImageLayout value)
{
switch (value)
{
}
}
- inline std::string to_string(AttachmentLoadOp value)
+ VULKAN_HPP_INLINE std::string to_string(AttachmentLoadOp value)
{
switch (value)
{
}
}
- inline std::string to_string(AttachmentStoreOp value)
+ VULKAN_HPP_INLINE std::string to_string(AttachmentStoreOp value)
{
switch (value)
{
}
}
- inline std::string to_string(ImageType value)
+ VULKAN_HPP_INLINE std::string to_string(ImageType value)
{
switch (value)
{
}
}
- inline std::string to_string(ImageTiling value)
+ VULKAN_HPP_INLINE std::string to_string(ImageTiling value)
{
switch (value)
{
}
}
- inline std::string to_string(ImageViewType value)
+ VULKAN_HPP_INLINE std::string to_string(ImageViewType value)
{
switch (value)
{
}
}
- inline std::string to_string(CommandBufferLevel value)
+ VULKAN_HPP_INLINE std::string to_string(CommandBufferLevel value)
{
switch (value)
{
}
}
- inline std::string to_string(ComponentSwizzle value)
+ VULKAN_HPP_INLINE std::string to_string(ComponentSwizzle value)
{
switch (value)
{
}
}
- inline std::string to_string(DescriptorType value)
+ VULKAN_HPP_INLINE std::string to_string(DescriptorType value)
{
switch (value)
{
}
}
- inline std::string to_string(QueryType value)
+ VULKAN_HPP_INLINE std::string to_string(QueryType value)
{
switch (value)
{
}
}
- inline std::string to_string(BorderColor value)
+ VULKAN_HPP_INLINE std::string to_string(BorderColor value)
{
switch (value)
{
}
}
- inline std::string to_string(PipelineBindPoint value)
+ VULKAN_HPP_INLINE std::string to_string(PipelineBindPoint value)
{
switch (value)
{
}
}
- inline std::string to_string(PipelineCacheHeaderVersion value)
+ VULKAN_HPP_INLINE std::string to_string(PipelineCacheHeaderVersion value)
{
switch (value)
{
}
}
- inline std::string to_string(PrimitiveTopology value)
+ VULKAN_HPP_INLINE std::string to_string(PrimitiveTopology value)
{
switch (value)
{
}
}
- inline std::string to_string(SharingMode value)
+ VULKAN_HPP_INLINE std::string to_string(SharingMode value)
{
switch (value)
{
}
}
- inline std::string to_string(IndexType value)
+ VULKAN_HPP_INLINE std::string to_string(IndexType value)
{
switch (value)
{
}
}
- inline std::string to_string(Filter value)
+ VULKAN_HPP_INLINE std::string to_string(Filter value)
{
switch (value)
{
}
}
- inline std::string to_string(SamplerMipmapMode value)
+ VULKAN_HPP_INLINE std::string to_string(SamplerMipmapMode value)
{
switch (value)
{
}
}
- inline std::string to_string(SamplerAddressMode value)
+ VULKAN_HPP_INLINE std::string to_string(SamplerAddressMode value)
{
switch (value)
{
}
}
- inline std::string to_string(CompareOp value)
+ VULKAN_HPP_INLINE std::string to_string(CompareOp value)
{
switch (value)
{
}
}
- inline std::string to_string(PolygonMode value)
+ VULKAN_HPP_INLINE std::string to_string(PolygonMode value)
{
switch (value)
{
}
}
- inline std::string to_string(CullModeFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(CullModeFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(CullModeFlags value)
+ VULKAN_HPP_INLINE std::string to_string(CullModeFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(FrontFace value)
+ VULKAN_HPP_INLINE std::string to_string(FrontFace value)
{
switch (value)
{
}
}
- inline std::string to_string(BlendFactor value)
+ VULKAN_HPP_INLINE std::string to_string(BlendFactor value)
{
switch (value)
{
}
}
- inline std::string to_string(BlendOp value)
+ VULKAN_HPP_INLINE std::string to_string(BlendOp value)
{
switch (value)
{
}
}
- inline std::string to_string(StencilOp value)
+ VULKAN_HPP_INLINE std::string to_string(StencilOp value)
{
switch (value)
{
}
}
- inline std::string to_string(LogicOp value)
+ VULKAN_HPP_INLINE std::string to_string(LogicOp value)
{
switch (value)
{
}
}
- inline std::string to_string(InternalAllocationType value)
+ VULKAN_HPP_INLINE std::string to_string(InternalAllocationType value)
{
switch (value)
{
}
}
- inline std::string to_string(SystemAllocationScope value)
+ VULKAN_HPP_INLINE std::string to_string(SystemAllocationScope value)
{
switch (value)
{
}
}
- inline std::string to_string(PhysicalDeviceType value)
+ VULKAN_HPP_INLINE std::string to_string(PhysicalDeviceType value)
{
switch (value)
{
}
}
- inline std::string to_string(VertexInputRate value)
+ VULKAN_HPP_INLINE std::string to_string(VertexInputRate value)
{
switch (value)
{
}
}
- inline std::string to_string(Format value)
+ VULKAN_HPP_INLINE std::string to_string(Format value)
{
switch (value)
{
}
}
- inline std::string to_string(StructureType value)
+ VULKAN_HPP_INLINE std::string to_string(StructureType value)
{
switch (value)
{
case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
+ case StructureType::eObjectTableCreateInfoNVX: return "ObjectTableCreateInfoNVX";
+ case StructureType::eIndirectCommandsLayoutCreateInfoNVX: return "IndirectCommandsLayoutCreateInfoNVX";
+ case StructureType::eCmdProcessCommandsInfoNVX: return "CmdProcessCommandsInfoNVX";
+ case StructureType::eCmdReserveSpaceForCommandsInfoNVX: return "CmdReserveSpaceForCommandsInfoNVX";
+ case StructureType::eDeviceGeneratedCommandsLimitsNVX: return "DeviceGeneratedCommandsLimitsNVX";
+ case StructureType::eDeviceGeneratedCommandsFeaturesNVX: return "DeviceGeneratedCommandsFeaturesNVX";
default: return "invalid";
}
}
- inline std::string to_string(SubpassContents value)
+ VULKAN_HPP_INLINE std::string to_string(SubpassContents value)
{
switch (value)
{
}
}
- inline std::string to_string(DynamicState value)
+ VULKAN_HPP_INLINE std::string to_string(DynamicState value)
{
switch (value)
{
}
}
- inline std::string to_string(QueueFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(QueueFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(QueueFlags value)
+ VULKAN_HPP_INLINE std::string to_string(QueueFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(MemoryPropertyFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(MemoryPropertyFlags value)
+ VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(MemoryHeapFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(MemoryHeapFlags value)
+ VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(AccessFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(AccessFlagBits value)
{
switch (value)
{
case AccessFlagBits::eHostWrite: return "HostWrite";
case AccessFlagBits::eMemoryRead: return "MemoryRead";
case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
+ case AccessFlagBits::eCommandProcessReadNVX: return "CommandProcessReadNVX";
+ case AccessFlagBits::eCommandProcessWriteNVX: return "CommandProcessWriteNVX";
default: return "invalid";
}
}
- inline std::string to_string(AccessFlags value)
+ VULKAN_HPP_INLINE std::string to_string(AccessFlags value)
{
if (!value) return "{}";
std::string result;
if (value & AccessFlagBits::eHostWrite) result += "HostWrite | ";
if (value & AccessFlagBits::eMemoryRead) result += "MemoryRead | ";
if (value & AccessFlagBits::eMemoryWrite) result += "MemoryWrite | ";
+ if (value & AccessFlagBits::eCommandProcessReadNVX) result += "CommandProcessReadNVX | ";
+ if (value & AccessFlagBits::eCommandProcessWriteNVX) result += "CommandProcessWriteNVX | ";
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(BufferUsageFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(BufferUsageFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(BufferUsageFlags value)
+ VULKAN_HPP_INLINE std::string to_string(BufferUsageFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(BufferCreateFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(BufferCreateFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(BufferCreateFlags value)
+ VULKAN_HPP_INLINE std::string to_string(BufferCreateFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(ShaderStageFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(ShaderStageFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(ShaderStageFlags value)
+ VULKAN_HPP_INLINE std::string to_string(ShaderStageFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(ImageUsageFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(ImageUsageFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(ImageUsageFlags value)
+ VULKAN_HPP_INLINE std::string to_string(ImageUsageFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(ImageCreateFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(ImageCreateFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(ImageCreateFlags value)
+ VULKAN_HPP_INLINE std::string to_string(ImageCreateFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(PipelineCreateFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(PipelineCreateFlags value)
+ VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(ColorComponentFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(ColorComponentFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(ColorComponentFlags value)
+ VULKAN_HPP_INLINE std::string to_string(ColorComponentFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(FenceCreateFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(FenceCreateFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(FenceCreateFlags value)
+ VULKAN_HPP_INLINE std::string to_string(FenceCreateFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(FormatFeatureFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(FormatFeatureFlags value)
+ VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(QueryControlFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(QueryControlFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(QueryControlFlags value)
+ VULKAN_HPP_INLINE std::string to_string(QueryControlFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(QueryResultFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(QueryResultFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(QueryResultFlags value)
+ VULKAN_HPP_INLINE std::string to_string(QueryResultFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(CommandBufferUsageFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(CommandBufferUsageFlags value)
+ VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(QueryPipelineStatisticFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(QueryPipelineStatisticFlags value)
+ VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(ImageAspectFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(ImageAspectFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(ImageAspectFlags value)
+ VULKAN_HPP_INLINE std::string to_string(ImageAspectFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(SparseImageFormatFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(SparseImageFormatFlags value)
+ VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(SparseMemoryBindFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(SparseMemoryBindFlags value)
+ VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(PipelineStageFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(PipelineStageFlagBits value)
{
switch (value)
{
case PipelineStageFlagBits::eHost: return "Host";
case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
case PipelineStageFlagBits::eAllCommands: return "AllCommands";
+ case PipelineStageFlagBits::eCommandProcessNVX: return "CommandProcessNVX";
default: return "invalid";
}
}
- inline std::string to_string(PipelineStageFlags value)
+ VULKAN_HPP_INLINE std::string to_string(PipelineStageFlags value)
{
if (!value) return "{}";
std::string result;
if (value & PipelineStageFlagBits::eHost) result += "Host | ";
if (value & PipelineStageFlagBits::eAllGraphics) result += "AllGraphics | ";
if (value & PipelineStageFlagBits::eAllCommands) result += "AllCommands | ";
+ if (value & PipelineStageFlagBits::eCommandProcessNVX) result += "CommandProcessNVX | ";
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(CommandPoolCreateFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(CommandPoolCreateFlags value)
+ VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(CommandPoolResetFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(CommandPoolResetFlags value)
+ VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(CommandBufferResetFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(CommandBufferResetFlags value)
+ VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(SampleCountFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(SampleCountFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(SampleCountFlags value)
+ VULKAN_HPP_INLINE std::string to_string(SampleCountFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(AttachmentDescriptionFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(AttachmentDescriptionFlags value)
+ VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(StencilFaceFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(StencilFaceFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(StencilFaceFlags value)
+ VULKAN_HPP_INLINE std::string to_string(StencilFaceFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(DescriptorPoolCreateFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(DescriptorPoolCreateFlags value)
+ VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(DependencyFlagBits value)
+ VULKAN_HPP_INLINE std::string to_string(DependencyFlagBits value)
{
switch (value)
{
}
}
- inline std::string to_string(DependencyFlags value)
+ VULKAN_HPP_INLINE std::string to_string(DependencyFlags value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(PresentModeKHR value)
+ VULKAN_HPP_INLINE std::string to_string(PresentModeKHR value)
{
switch (value)
{
}
}
- inline std::string to_string(ColorSpaceKHR value)
+ VULKAN_HPP_INLINE std::string to_string(ColorSpaceKHR value)
{
switch (value)
{
}
}
- inline std::string to_string(DisplayPlaneAlphaFlagBitsKHR value)
+ VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagBitsKHR value)
{
switch (value)
{
}
}
- inline std::string to_string(DisplayPlaneAlphaFlagsKHR value)
+ VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagsKHR value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(CompositeAlphaFlagBitsKHR value)
+ VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagBitsKHR value)
{
switch (value)
{
}
}
- inline std::string to_string(CompositeAlphaFlagsKHR value)
+ VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagsKHR value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(SurfaceTransformFlagBitsKHR value)
+ VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagBitsKHR value)
{
switch (value)
{
}
}
- inline std::string to_string(SurfaceTransformFlagsKHR value)
+ VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagsKHR value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(DebugReportFlagBitsEXT value)
+ VULKAN_HPP_INLINE std::string to_string(DebugReportFlagBitsEXT value)
{
switch (value)
{
}
}
- inline std::string to_string(DebugReportFlagsEXT value)
+ VULKAN_HPP_INLINE std::string to_string(DebugReportFlagsEXT value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(DebugReportObjectTypeEXT value)
+ VULKAN_HPP_INLINE std::string to_string(DebugReportObjectTypeEXT value)
{
switch (value)
{
case DebugReportObjectTypeEXT::eSurfaceKhr: return "SurfaceKhr";
case DebugReportObjectTypeEXT::eSwapchainKhr: return "SwapchainKhr";
case DebugReportObjectTypeEXT::eDebugReport: return "DebugReport";
+ case DebugReportObjectTypeEXT::eDisplayKhr: return "DisplayKhr";
+ case DebugReportObjectTypeEXT::eDisplayModeKhr: return "DisplayModeKhr";
+ case DebugReportObjectTypeEXT::eObjectTableNvx: return "ObjectTableNvx";
+ case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNvx: return "IndirectCommandsLayoutNvx";
default: return "invalid";
}
}
- inline std::string to_string(DebugReportErrorEXT value)
+ VULKAN_HPP_INLINE std::string to_string(DebugReportErrorEXT value)
{
switch (value)
{
}
}
- inline std::string to_string(RasterizationOrderAMD value)
+ VULKAN_HPP_INLINE std::string to_string(RasterizationOrderAMD value)
{
switch (value)
{
}
}
- inline std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value)
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value)
{
switch (value)
{
}
}
- inline std::string to_string(ExternalMemoryHandleTypeFlagsNV value)
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsNV value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(ExternalMemoryFeatureFlagBitsNV value)
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsNV value)
{
switch (value)
{
}
}
- inline std::string to_string(ExternalMemoryFeatureFlagsNV value)
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsNV value)
{
if (!value) return "{}";
std::string result;
return "{" + result.substr(0, result.size() - 3) + "}";
}
- inline std::string to_string(ValidationCheckEXT value)
+ VULKAN_HPP_INLINE std::string to_string(ValidationCheckEXT value)
{
switch (value)
{
}
}
+ // Maps a single IndirectCommandsLayoutUsageFlagBitsNVX bit to its unprefixed
+ // enumerant name (e.g. eUnorderedSequences -> "UnorderedSequences").
+ // Any value that is not exactly one of the listed bits yields "invalid".
+ VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagBitsNVX value)
+ {
+ switch (value)
+ {
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences: return "UnorderedSequences";
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences: return "SparseSequences";
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions: return "EmptyExecutions";
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences: return "IndexedSequences";
+ default: return "invalid";
+ }
+ }
+
+ // Renders a flag mask as "{BitA | BitB | ...}". An empty mask returns "{}".
+ // Each recognized bit appends "Name | "; substr(0, size - 3) then trims the
+ // trailing " | " separator left after the last appended name.
+ // NOTE(review): a nonzero mask containing only unrecognized bits leaves
+ // result empty, so size() - 3 wraps around — substr(0, huge) on an empty
+ // string is still well-defined and the output degrades to "{}".
+ VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagsNVX value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) result += "UnorderedSequences | ";
+ if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) result += "SparseSequences | ";
+ if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) result += "EmptyExecutions | ";
+ if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) result += "IndexedSequences | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ // Maps a single ObjectEntryUsageFlagBitsNVX bit to its unprefixed enumerant
+ // name; values that are not exactly one known bit yield "invalid".
+ VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagBitsNVX value)
+ {
+ switch (value)
+ {
+ case ObjectEntryUsageFlagBitsNVX::eGraphics: return "Graphics";
+ case ObjectEntryUsageFlagBitsNVX::eCompute: return "Compute";
+ default: return "invalid";
+ }
+ }
+
+ // Renders an ObjectEntryUsageFlagsNVX mask as "{Graphics | Compute}" style
+ // text; "{}" for an empty mask. The substr(0, size - 3) drops the trailing
+ // " | " separator appended after the last recognized bit.
+ VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagsNVX value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ObjectEntryUsageFlagBitsNVX::eGraphics) result += "Graphics | ";
+ if (value & ObjectEntryUsageFlagBitsNVX::eCompute) result += "Compute | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ // Maps an IndirectCommandsTokenTypeNVX enumerant to its name string.
+ // The "VkIndirectCommandsToken..." spelling mirrors the generated enumerant
+ // names for this extension; unknown values yield "invalid".
+ VULKAN_HPP_INLINE std::string to_string(IndirectCommandsTokenTypeNVX value)
+ {
+ switch (value)
+ {
+ case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline: return "VkIndirectCommandsTokenPipeline";
+ case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDescriptorSet: return "VkIndirectCommandsTokenDescriptorSet";
+ case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenIndexBuffer: return "VkIndirectCommandsTokenIndexBuffer";
+ case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenVertexBuffer: return "VkIndirectCommandsTokenVertexBuffer";
+ case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPushConstant: return "VkIndirectCommandsTokenPushConstant";
+ case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDrawIndexed: return "VkIndirectCommandsTokenDrawIndexed";
+ case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDraw: return "VkIndirectCommandsTokenDraw";
+ case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDispatch: return "VkIndirectCommandsTokenDispatch";
+ default: return "invalid";
+ }
+ }
+
+ // Maps an ObjectEntryTypeNVX enumerant to its name string; unknown values
+ // yield "invalid".
+ VULKAN_HPP_INLINE std::string to_string(ObjectEntryTypeNVX value)
+ {
+ switch (value)
+ {
+ case ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet: return "VkObjectEntryDescriptorSet";
+ case ObjectEntryTypeNVX::eVkObjectEntryPipeline: return "VkObjectEntryPipeline";
+ case ObjectEntryTypeNVX::eVkObjectEntryIndexBuffer: return "VkObjectEntryIndexBuffer";
+ case ObjectEntryTypeNVX::eVkObjectEntryVertexBuffer: return "VkObjectEntryVertexBuffer";
+ case ObjectEntryTypeNVX::eVkObjectEntryPushConstant: return "VkObjectEntryPushConstant";
+ default: return "invalid";
+ }
+ }
+
} // namespace vk
#endif